This is an automated email from the ASF dual-hosted git repository. yangjie01 pushed a commit to branch branch-3.4 in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.4 by this push: new 44c79d9600f [SPARK-44034][TESTS][3.4] Add a new test group for sql module 44c79d9600f is described below commit 44c79d9600f14d1bb9db7ccb2b8b8e5a6d40459e Author: yangjie01 <yangji...@baidu.com> AuthorDate: Thu Sep 28 10:10:51 2023 +0800 [SPARK-44034][TESTS][3.4] Add a new test group for sql module ### What changes were proposed in this pull request? The purpose of this pr is to add a new test tag `SlowSQLTest` to the sql module, identify some suites whose test cases take more than 3 seconds, and apply the tag to the GA testing task to reduce the testing pressure of the `sql others` group. ### Why are the changes needed? For a long time, the sql module UTs have had only two groups: `slow` and `others`. The test cases in group `slow` are fixed, while the number of test cases in group `others` continues to increase, which has had a certain impact on the testing duration and stability of group `others`. So this PR proposes to add a new testing group to share the testing pressure of the `sql others` group, which has made the testing time of the three groups more balanced, and hopefully it can improve the stability of the GA task. ### Does this PR introduce _any_ user-facing change? No ### How was this patch tested? Should monitor GA ### Was this patch authored or co-authored using generative AI tooling? No Closes #43141 from LuciferYang/SPARK-44034-34. 
Authored-by: yangjie01 <yangji...@baidu.com> Signed-off-by: yangjie01 <yangji...@baidu.com> --- .github/workflows/build_and_test.yml | 8 +++++++- .../java/org/apache/spark/tags/SlowSQLTest.java | 21 ++++++++++----------- .../spark/sql/ApproximatePercentileQuerySuite.scala | 2 ++ .../org/apache/spark/sql/CachedTableSuite.scala | 2 ++ .../apache/spark/sql/DataFrameAsOfJoinSuite.scala | 2 ++ .../scala/org/apache/spark/sql/DataFrameSuite.scala | 2 ++ .../spark/sql/DataFrameWindowFunctionsSuite.scala | 2 ++ .../org/apache/spark/sql/DatasetCacheSuite.scala | 3 ++- .../test/scala/org/apache/spark/sql/JoinSuite.scala | 2 ++ .../WriteDistributionAndOrderingSuite.scala | 2 ++ .../sql/execution/BroadcastExchangeSuite.scala | 2 ++ .../execution/OptimizeMetadataOnlyQuerySuite.scala | 2 ++ .../spark/sql/execution/QueryExecutionSuite.scala | 3 ++- .../execution/adaptive/AdaptiveQueryExecSuite.scala | 2 ++ .../FileSourceAggregatePushDownSuite.scala | 5 +++++ .../execution/datasources/V1WriteCommandSuite.scala | 2 ++ .../parquet/ParquetRebaseDatetimeSuite.scala | 3 +++ .../datasources/parquet/ParquetRowIndexSuite.scala | 2 ++ .../execution/streaming/state/RocksDBSuite.scala | 2 ++ .../sql/execution/ui/AllExecutionsPageSuite.scala | 2 ++ .../spark/sql/expressions/ExpressionInfoSuite.scala | 2 ++ .../spark/sql/sources/BucketedReadSuite.scala | 2 ++ .../spark/sql/sources/BucketedWriteSuite.scala | 2 ++ .../DisableUnnecessaryBucketedScanSuite.scala | 3 +++ .../streaming/AcceptsLatestSeenOffsetSuite.scala | 2 ++ .../DeprecatedStreamingAggregationSuite.scala | 2 ++ .../sql/streaming/EventTimeWatermarkSuite.scala | 2 ++ .../spark/sql/streaming/FileStreamSinkSuite.scala | 3 +++ .../spark/sql/streaming/FileStreamSourceSuite.scala | 5 ++++- .../spark/sql/streaming/FileStreamStressSuite.scala | 2 ++ ...apGroupsInPandasWithStateDistributionSuite.scala | 2 ++ .../FlatMapGroupsInPandasWithStateSuite.scala | 2 ++ .../FlatMapGroupsWithStateDistributionSuite.scala | 2 ++ 
.../sql/streaming/FlatMapGroupsWithStateSuite.scala | 2 ++ ...latMapGroupsWithStateWithInitialStateSuite.scala | 2 ++ .../sql/streaming/MemorySourceStressSuite.scala | 2 ++ .../sql/streaming/MultiStatefulOperatorsSuite.scala | 2 ++ .../apache/spark/sql/streaming/StreamSuite.scala | 2 ++ .../sql/streaming/StreamingAggregationSuite.scala | 2 ++ .../sql/streaming/StreamingDeduplicationSuite.scala | 2 ++ .../spark/sql/streaming/StreamingJoinSuite.scala | 5 +++++ .../sql/streaming/StreamingQueryListenerSuite.scala | 2 ++ .../sql/streaming/StreamingQueryManagerSuite.scala | 2 ++ .../spark/sql/streaming/StreamingQuerySuite.scala | 2 ++ .../StreamingSessionWindowDistributionSuite.scala | 2 ++ .../sql/streaming/StreamingSessionWindowSuite.scala | 2 ++ ...treamingStateStoreFormatCompatibilitySuite.scala | 2 ++ .../sql/streaming/TriggerAvailableNowSuite.scala | 2 ++ .../ContinuousQueryStatusAndProgressSuite.scala | 2 ++ .../sql/streaming/continuous/ContinuousSuite.scala | 4 ++++ .../sources/StreamingDataSourceV2Suite.scala | 3 +++ .../test/DataStreamReaderWriterSuite.scala | 2 ++ .../streaming/test/DataStreamTableAPISuite.scala | 2 ++ .../sql/streaming/ui/StreamingQueryPageSuite.scala | 2 ++ .../spark/sql/streaming/ui/UISeleniumSuite.scala | 3 +++ 55 files changed, 138 insertions(+), 15 deletions(-) diff --git a/.github/workflows/build_and_test.yml b/.github/workflows/build_and_test.yml index 4f9978b0414..aff173b8e51 100644 --- a/.github/workflows/build_and_test.yml +++ b/.github/workflows/build_and_test.yml @@ -177,12 +177,18 @@ jobs: hadoop: ${{ inputs.hadoop }} hive: hive2.3 included-tags: org.apache.spark.tags.ExtendedSQLTest + comment: "- extended tests" + - modules: sql + java: ${{ inputs.java }} + hadoop: ${{ inputs.hadoop }} + hive: hive2.3 + included-tags: org.apache.spark.tags.SlowSQLTest comment: "- slow tests" - modules: sql java: ${{ inputs.java }} hadoop: ${{ inputs.hadoop }} hive: hive2.3 - excluded-tags: org.apache.spark.tags.ExtendedSQLTest + excluded-tags: 
org.apache.spark.tags.ExtendedSQLTest,org.apache.spark.tags.SlowSQLTest comment: "- other tests" env: MODULES_TO_TEST: ${{ matrix.modules }} diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala b/common/tags/src/test/java/org/apache/spark/tags/SlowSQLTest.java similarity index 69% copy from sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala copy to common/tags/src/test/java/org/apache/spark/tags/SlowSQLTest.java index 7f2972edea7..63211aa23b3 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala +++ b/common/tags/src/test/java/org/apache/spark/tags/SlowSQLTest.java @@ -15,17 +15,16 @@ * limitations under the License. */ -package org.apache.spark.sql.streaming +package org.apache.spark.tags; -import org.apache.spark.sql.execution.streaming._ +import org.scalatest.TagAnnotation; -class MemorySourceStressSuite extends StreamTest { - import testImplicits._ +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; - test("memory stress test") { - val input = MemoryStream[Int] - val mapped = input.toDS().map(_ + 1) - - runStressTest(mapped, AddData(input, _: _*)) - } -} +@TagAnnotation +@Retention(RetentionPolicy.RUNTIME) +@Target({ElementType.METHOD, ElementType.TYPE}) +public @interface SlowSQLTest { } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/ApproximatePercentileQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/ApproximatePercentileQuerySuite.scala index 8598e92f029..273e8e08fd7 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/ApproximatePercentileQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/ApproximatePercentileQuerySuite.scala @@ -25,10 +25,12 @@ import org.apache.spark.sql.catalyst.expressions.aggregate.ApproximatePercentile import 
org.apache.spark.sql.catalyst.expressions.aggregate.ApproximatePercentile.PercentileDigest import org.apache.spark.sql.catalyst.util.DateTimeUtils import org.apache.spark.sql.test.SharedSparkSession +import org.apache.spark.tags.SlowSQLTest /** * End-to-end tests for approximate percentile aggregate function. */ +@SlowSQLTest class ApproximatePercentileQuerySuite extends QueryTest with SharedSparkSession { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala index 5548108b915..166c271f662 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/CachedTableSuite.scala @@ -45,11 +45,13 @@ import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils} import org.apache.spark.sql.types.{StringType, StructField, StructType} import org.apache.spark.storage.{RDDBlockId, StorageLevel} import org.apache.spark.storage.StorageLevel.{MEMORY_AND_DISK_2, MEMORY_ONLY} +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.unsafe.types.CalendarInterval import org.apache.spark.util.{AccumulatorContext, Utils} private case class BigData(s: String) +@SlowSQLTest class CachedTableSuite extends QueryTest with SQLTestUtils with SharedSparkSession with AdaptiveSparkPlanHelper { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala index 749efe95c5d..e0dfd54e7b3 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameAsOfJoinSuite.scala @@ -23,7 +23,9 @@ import org.apache.spark.sql.execution.adaptive.AdaptiveSparkPlanHelper import org.apache.spark.sql.functions._ import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.sql.types._ +import 
org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class DataFrameAsOfJoinSuite extends QueryTest with SharedSparkSession with AdaptiveSparkPlanHelper { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala index 7ccaa7661b5..9ddb4abe98b 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameSuite.scala @@ -50,10 +50,12 @@ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.{ExamplePoint, ExamplePointUDT, SharedSparkSession} import org.apache.spark.sql.test.SQLTestData.{ArrayStringWrapper, ContainerStringWrapper, DecimalData, StringWrapper, TestData2} import org.apache.spark.sql.types._ +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.unsafe.types.CalendarInterval import org.apache.spark.util.Utils import org.apache.spark.util.random.XORShiftRandom +@SlowSQLTest class DataFrameSuite extends QueryTest with SharedSparkSession with AdaptiveSparkPlanHelper { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala index 990c1e1b2de..3f8e2588fbc 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DataFrameWindowFunctionsSuite.scala @@ -31,10 +31,12 @@ import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.sql.types._ +import org.apache.spark.tags.SlowSQLTest /** * Window function testing for DataFrame API. 
*/ +@SlowSQLTest class DataFrameWindowFunctionsSuite extends QueryTest with SharedSparkSession with AdaptiveSparkPlanHelper { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala index 48b20cc9094..a657c6212aa 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/DatasetCacheSuite.scala @@ -25,8 +25,9 @@ import org.apache.spark.sql.execution.columnar.{InMemoryRelation, InMemoryTableS import org.apache.spark.sql.functions._ import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.storage.StorageLevel +import org.apache.spark.tags.SlowSQLTest - +@SlowSQLTest class DatasetCacheSuite extends QueryTest with SharedSparkSession with TimeLimits diff --git a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala index c5ecc1bc841..832b6ee5c32 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/JoinSuite.scala @@ -37,7 +37,9 @@ import org.apache.spark.sql.execution.python.BatchEvalPythonExec import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.sql.types.StructType +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class JoinSuite extends QueryTest with SharedSparkSession with AdaptiveSparkPlanHelper { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/connector/WriteDistributionAndOrderingSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/connector/WriteDistributionAndOrderingSuite.scala index 3a7d2359005..d4a6d221365 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/connector/WriteDistributionAndOrderingSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/connector/WriteDistributionAndOrderingSuite.scala @@ -42,7 
+42,9 @@ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.streaming.{StreamingQueryException, Trigger} import org.apache.spark.sql.types.{DateType, IntegerType, LongType, ObjectType, StringType, StructType, TimestampType} import org.apache.spark.sql.util.QueryExecutionListener +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class WriteDistributionAndOrderingSuite extends DistributionAndOrderingSuiteBase { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/BroadcastExchangeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/BroadcastExchangeSuite.scala index 129f76d7be3..719bac56a66 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/BroadcastExchangeSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/BroadcastExchangeSuite.scala @@ -29,6 +29,7 @@ import org.apache.spark.sql.execution.joins.HashedRelation import org.apache.spark.sql.functions.broadcast import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SharedSparkSession +import org.apache.spark.tags.ExtendedSQLTest class BroadcastExchangeSuite extends SparkPlanTest with SharedSparkSession @@ -98,6 +99,7 @@ class BroadcastExchangeSuite extends SparkPlanTest } // Additional tests run in 'local-cluster' mode. 
+@ExtendedSQLTest class BroadcastExchangeExecSparkSuite extends SparkFunSuite with LocalSparkContext with AdaptiveSparkPlanHelper { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/OptimizeMetadataOnlyQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/OptimizeMetadataOnlyQuerySuite.scala index 68691e2f7fd..cbc565974cd 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/OptimizeMetadataOnlyQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/OptimizeMetadataOnlyQuerySuite.scala @@ -24,7 +24,9 @@ import org.apache.spark.sql.catalyst.plans.logical.LocalRelation import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.SQLConf.OPTIMIZER_METADATA_ONLY import org.apache.spark.sql.test.SharedSparkSession +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class OptimizeMetadataOnlyQuerySuite extends QueryTest with SharedSparkSession { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala index d2a101b2395..0a22efcb34d 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/QueryExecutionSuite.scala @@ -220,7 +220,8 @@ class QueryExecutionSuite extends SharedSparkSession { assertNoTag(tag5, df.queryExecution.sparkPlan) } - test("Logging plan changes for execution") { + // TODO(SPARK-44074): re-enable this test after SPARK-44074 resolved + ignore("Logging plan changes for execution") { val testAppender = new LogAppender("plan changes") withLogAppender(testAppender) { withSQLConf(SQLConf.PLAN_CHANGE_LOG_LEVEL.key -> "INFO") { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala 
index 31c8fe38dab..593bd7bb4ba 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/adaptive/AdaptiveQueryExecSuite.scala @@ -45,8 +45,10 @@ import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.sql.test.SQLTestData.TestData import org.apache.spark.sql.types.{IntegerType, StructType} import org.apache.spark.sql.util.QueryExecutionListener +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils +@SlowSQLTest class AdaptiveQueryExecSuite extends QueryTest with SharedSparkSession diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceAggregatePushDownSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceAggregatePushDownSuite.scala index e8fae210fa4..317abd57a94 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceAggregatePushDownSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/FileSourceAggregatePushDownSuite.scala @@ -28,6 +28,7 @@ import org.apache.spark.sql.functions.min import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.sql.types.{BinaryType, BooleanType, ByteType, DateType, Decimal, DecimalType, DoubleType, FloatType, IntegerType, LongType, ShortType, StringType, StructField, StructType, TimestampType} +import org.apache.spark.tags.SlowSQLTest /** * A test suite that tests aggregate push down for Parquet and ORC. 
@@ -543,12 +544,14 @@ abstract class ParquetAggregatePushDownSuite SQLConf.PARQUET_AGGREGATE_PUSHDOWN_ENABLED.key } +@SlowSQLTest class ParquetV1AggregatePushDownSuite extends ParquetAggregatePushDownSuite { override protected def sparkConf: SparkConf = super.sparkConf.set(SQLConf.USE_V1_SOURCE_LIST, "parquet") } +@SlowSQLTest class ParquetV2AggregatePushDownSuite extends ParquetAggregatePushDownSuite { override protected def sparkConf: SparkConf = @@ -562,12 +565,14 @@ abstract class OrcAggregatePushDownSuite extends OrcTest with FileSourceAggregat SQLConf.ORC_AGGREGATE_PUSHDOWN_ENABLED.key } +@SlowSQLTest class OrcV1AggregatePushDownSuite extends OrcAggregatePushDownSuite { override protected def sparkConf: SparkConf = super.sparkConf.set(SQLConf.USE_V1_SOURCE_LIST, "orc") } +@SlowSQLTest class OrcV2AggregatePushDownSuite extends OrcAggregatePushDownSuite { override protected def sparkConf: SparkConf = diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/V1WriteCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/V1WriteCommandSuite.scala index 20a90cd94b6..ce43edb79c1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/V1WriteCommandSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/V1WriteCommandSuite.scala @@ -26,6 +26,7 @@ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils} import org.apache.spark.sql.types.{IntegerType, StringType} import org.apache.spark.sql.util.QueryExecutionListener +import org.apache.spark.tags.SlowSQLTest trait V1WriteCommandSuiteBase extends SQLTestUtils { @@ -105,6 +106,7 @@ trait V1WriteCommandSuiteBase extends SQLTestUtils { } } +@SlowSQLTest class V1WriteCommandSuite extends QueryTest with SharedSparkSession with V1WriteCommandSuiteBase { import testImplicits._ diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRebaseDatetimeSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRebaseDatetimeSuite.scala index 85c42cf7db8..240bb4e6dcb 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRebaseDatetimeSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRebaseDatetimeSuite.scala @@ -28,6 +28,7 @@ import org.apache.spark.sql.internal.SQLConf.{LegacyBehaviorPolicy, ParquetOutpu import org.apache.spark.sql.internal.SQLConf.LegacyBehaviorPolicy.{CORRECTED, EXCEPTION, LEGACY} import org.apache.spark.sql.internal.SQLConf.ParquetOutputTimestampType.{INT96, TIMESTAMP_MICROS, TIMESTAMP_MILLIS} import org.apache.spark.sql.test.SharedSparkSession +import org.apache.spark.tags.SlowSQLTest abstract class ParquetRebaseDatetimeSuite extends QueryTest @@ -461,6 +462,7 @@ abstract class ParquetRebaseDatetimeSuite } } +@SlowSQLTest class ParquetRebaseDatetimeV1Suite extends ParquetRebaseDatetimeSuite { override protected def sparkConf: SparkConf = super @@ -468,6 +470,7 @@ class ParquetRebaseDatetimeV1Suite extends ParquetRebaseDatetimeSuite { .set(SQLConf.USE_V1_SOURCE_LIST, "parquet") } +@SlowSQLTest class ParquetRebaseDatetimeV2Suite extends ParquetRebaseDatetimeSuite { override protected def sparkConf: SparkConf = super diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexSuite.scala index c36ab49b5e3..351c6d698fc 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/parquet/ParquetRowIndexSuite.scala @@ -34,7 +34,9 @@ import org.apache.spark.sql.functions.{col, max, min} import 
org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.sql.types.{LongType, StringType} +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class ParquetRowIndexSuite extends QueryTest with SharedSparkSession { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala index ae278bc7307..04996abbdc0 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/streaming/state/RocksDBSuite.scala @@ -29,8 +29,10 @@ import org.apache.spark._ import org.apache.spark.sql.catalyst.util.quietly import org.apache.spark.sql.execution.streaming.CreateAtomicTestManager import org.apache.spark.sql.internal.SQLConf +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.{ThreadUtils, Utils} +@SlowSQLTest class RocksDBSuite extends SparkFunSuite { test("RocksDB: get, put, iterator, commit, load") { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/AllExecutionsPageSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/AllExecutionsPageSuite.scala index d1cd32f3621..fe5e99d3f3f 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/AllExecutionsPageSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/ui/AllExecutionsPageSuite.scala @@ -34,6 +34,7 @@ import org.apache.spark.sql.DataFrame import org.apache.spark.sql.execution.{SparkPlanInfo, SQLExecution} import org.apache.spark.sql.test.SharedSparkSession import org.apache.spark.status.{AppStatusStore, ElementTrackingStore} +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils import org.apache.spark.util.kvstore.InMemoryStore @@ -271,6 +272,7 @@ class AllExecutionsPageWithInMemoryStoreSuite extends 
AllExecutionsPageSuite { } } +@SlowSQLTest class AllExecutionsPageWithRocksDBBackendSuite extends AllExecutionsPageSuite { private val storePath = Utils.createTempDir() override protected def createStatusStore(): SQLAppStatusStore = { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/expressions/ExpressionInfoSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/expressions/ExpressionInfoSuite.scala index ad3a8844626..4dd93983e87 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/expressions/ExpressionInfoSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/expressions/ExpressionInfoSuite.scala @@ -25,8 +25,10 @@ import org.apache.spark.sql.catalyst.expressions._ import org.apache.spark.sql.execution.HiveResult.hiveResultString import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.test.SharedSparkSession +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils +@SlowSQLTest class ExpressionInfoSuite extends SparkFunSuite with SharedSparkSession { test("Replace _FUNC_ in ExpressionInfo") { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala index fc7c4e5761b..266bb343526 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedReadSuite.scala @@ -33,8 +33,10 @@ import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils} +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.collection.BitSet +@SlowSQLTest class BucketedReadWithoutHiveSupportSuite extends BucketedReadSuite with SharedSparkSession { protected override def beforeAll(): Unit = { diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala index 65f40cce18e..4f1b7d363a1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/BucketedWriteSuite.scala @@ -29,7 +29,9 @@ import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils} +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class BucketedWriteWithoutHiveSupportSuite extends BucketedWriteSuite with SharedSparkSession { protected override def beforeAll(): Unit = { super.beforeAll() diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/DisableUnnecessaryBucketedScanSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/DisableUnnecessaryBucketedScanSuite.scala index 737cffc42f9..1f55742cd67 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/sources/DisableUnnecessaryBucketedScanSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/DisableUnnecessaryBucketedScanSuite.scala @@ -26,7 +26,9 @@ import org.apache.spark.sql.execution.exchange.ShuffleExchangeExec import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.internal.StaticSQLConf.CATALOG_IMPLEMENTATION import org.apache.spark.sql.test.{SharedSparkSession, SQLTestUtils} +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class DisableUnnecessaryBucketedScanWithoutHiveSupportSuite extends DisableUnnecessaryBucketedScanSuite with SharedSparkSession @@ -38,6 +40,7 @@ class DisableUnnecessaryBucketedScanWithoutHiveSupportSuite } } +@SlowSQLTest class DisableUnnecessaryBucketedScanWithoutHiveSupportSuiteAE extends DisableUnnecessaryBucketedScanSuite with SharedSparkSession diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/AcceptsLatestSeenOffsetSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/AcceptsLatestSeenOffsetSuite.scala index d3e9a08509b..b0c585ffda5 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/AcceptsLatestSeenOffsetSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/AcceptsLatestSeenOffsetSuite.scala @@ -26,7 +26,9 @@ import org.apache.spark.sql.connector.read.streaming.{AcceptsLatestSeenOffset, S import org.apache.spark.sql.execution.streaming._ import org.apache.spark.sql.execution.streaming.sources.{ContinuousMemoryStream, ContinuousMemoryStreamOffset} import org.apache.spark.sql.types.{LongType, StructType} +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class AcceptsLatestSeenOffsetSuite extends StreamTest with BeforeAndAfter { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/DeprecatedStreamingAggregationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/DeprecatedStreamingAggregationSuite.scala index 99f7e32d4df..7777887ec62 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/DeprecatedStreamingAggregationSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/DeprecatedStreamingAggregationSuite.scala @@ -24,7 +24,9 @@ import org.apache.spark.sql.execution.streaming.state.StreamingAggregationStateM import org.apache.spark.sql.expressions.scalalang.typed import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.streaming.OutputMode._ +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest @deprecated("This test suite will be removed.", "3.0.0") class DeprecatedStreamingAggregationSuite extends StateStoreMetricsTest with Assertions { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/EventTimeWatermarkSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/EventTimeWatermarkSuite.scala index 
058c335ad43..799344332ab 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/EventTimeWatermarkSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/EventTimeWatermarkSuite.scala @@ -38,8 +38,10 @@ import org.apache.spark.sql.execution.streaming.sources.MemorySink import org.apache.spark.sql.functions.{count, timestamp_seconds, window} import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.streaming.OutputMode._ +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils +@SlowSQLTest class EventTimeWatermarkSuite extends StreamTest with BeforeAndAfter with Matchers with Logging { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala index 8c31d3c7abf..75f440caefc 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSinkSuite.scala @@ -41,6 +41,7 @@ import org.apache.spark.sql.execution.streaming._ import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.types.{IntegerType, StructField, StructType} +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils abstract class FileStreamSinkSuite extends StreamTest { @@ -673,6 +674,7 @@ class PendingCommitFilesTrackingManifestFileCommitProtocol(jobId: String, path: } } +@SlowSQLTest class FileStreamSinkV1Suite extends FileStreamSinkSuite { override protected def sparkConf: SparkConf = super @@ -723,6 +725,7 @@ class FileStreamSinkV1Suite extends FileStreamSinkSuite { } } +@SlowSQLTest class FileStreamSinkV2Suite extends FileStreamSinkSuite { override protected def sparkConf: SparkConf = super diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala index e5229c5f253..ff96b3121c2 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamSourceSuite.scala @@ -44,7 +44,8 @@ import org.apache.spark.sql.execution.streaming.sources.MemorySink import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.streaming.util.StreamManualClock import org.apache.spark.sql.test.SharedSparkSession -import org.apache.spark.sql.types.{StructType, _} +import org.apache.spark.sql.types._ +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils abstract class FileStreamSourceTest @@ -226,6 +227,7 @@ abstract class FileStreamSourceTest val valueSchema = new StructType().add("value", StringType) } +@SlowSQLTest class FileStreamSourceSuite extends FileStreamSourceTest { import testImplicits._ @@ -2350,6 +2352,7 @@ class FileStreamSourceSuite extends FileStreamSourceTest { } } +@SlowSQLTest class FileStreamSourceStressTestSuite extends FileStreamSourceTest { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamStressSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamStressSuite.scala index 28412ea07a7..a27a04283da 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamStressSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FileStreamStressSuite.scala @@ -24,6 +24,7 @@ import scala.util.Random import scala.util.control.NonFatal import org.apache.spark.sql.catalyst.util._ +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils /** @@ -36,6 +37,7 @@ import org.apache.spark.util.Utils * * At the end, the resulting files are loaded and the answer is checked. 
*/ +@SlowSQLTest class FileStreamStressSuite extends StreamTest { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateDistributionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateDistributionSuite.scala index 9c6573fd782..1eae0fe9ef0 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateDistributionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateDistributionSuite.scala @@ -24,7 +24,9 @@ import org.apache.spark.sql.execution.python.FlatMapGroupsInPandasWithStateExec import org.apache.spark.sql.execution.streaming.MemoryStream import org.apache.spark.sql.streaming.util.{StatefulOpClusteredDistributionTestHelper, StreamManualClock} import org.apache.spark.sql.types.{IntegerType, LongType, StringType, StructField, StructType} +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class FlatMapGroupsInPandasWithStateDistributionSuite extends StreamTest with StatefulOpClusteredDistributionTestHelper { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateSuite.scala index ca738b805eb..20fb17fe6ec 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsInPandasWithStateSuite.scala @@ -28,7 +28,9 @@ import org.apache.spark.sql.functions.{lit, timestamp_seconds} import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.streaming.util.StreamManualClock import org.apache.spark.sql.types._ +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class FlatMapGroupsInPandasWithStateSuite extends StateStoreMetricsTest { import testImplicits._ diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateDistributionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateDistributionSuite.scala index f1578ae5df9..b597a244710 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateDistributionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateDistributionSuite.scala @@ -26,8 +26,10 @@ import org.apache.spark.sql.execution.streaming.{FlatMapGroupsWithStateExec, Mem import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.streaming.GroupStateTimeout.ProcessingTimeTimeout import org.apache.spark.sql.streaming.util.{StatefulOpClusteredDistributionTestHelper, StreamManualClock} +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils +@SlowSQLTest class FlatMapGroupsWithStateDistributionSuite extends StreamTest with StatefulOpClusteredDistributionTestHelper { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateSuite.scala index 49f4214ac1a..6aa7d0945c7 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateSuite.scala @@ -38,6 +38,7 @@ import org.apache.spark.sql.functions.timestamp_seconds import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.streaming.util.StreamManualClock import org.apache.spark.sql.types.{DataType, IntegerType} +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils /** Class to check custom state types */ @@ -45,6 +46,7 @@ case class RunningCount(count: Long) case class Result(key: Long, count: Int) +@SlowSQLTest class FlatMapGroupsWithStateSuite extends StateStoreMetricsTest { import testImplicits._ diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateWithInitialStateSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateWithInitialStateSuite.scala index beee07b9fbc..2a2a83d35e1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateWithInitialStateSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/FlatMapGroupsWithStateWithInitialStateSuite.scala @@ -26,7 +26,9 @@ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.streaming.FlatMapGroupsWithStateSuite.{assertCanGetProcessingTime, assertCannotGetWatermark} import org.apache.spark.sql.streaming.GroupStateTimeout.{EventTimeTimeout, NoTimeout, ProcessingTimeTimeout} import org.apache.spark.sql.streaming.util.StreamManualClock +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class FlatMapGroupsWithStateWithInitialStateSuite extends StateStoreMetricsTest { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala index 7f2972edea7..65ac6712ab4 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/MemorySourceStressSuite.scala @@ -18,7 +18,9 @@ package org.apache.spark.sql.streaming import org.apache.spark.sql.execution.streaming._ +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class MemorySourceStressSuite extends StreamTest { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MultiStatefulOperatorsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/MultiStatefulOperatorsSuite.scala index eb1e0de79ca..01065b75b35 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/MultiStatefulOperatorsSuite.scala +++ 
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/MultiStatefulOperatorsSuite.scala @@ -23,8 +23,10 @@ import org.apache.spark.sql.{AnalysisException, SparkSession} import org.apache.spark.sql.execution.streaming.MemoryStream import org.apache.spark.sql.execution.streaming.state.StateStore import org.apache.spark.sql.functions._ +import org.apache.spark.tags.SlowSQLTest // Tests for the multiple stateful operators support. +@SlowSQLTest class MultiStatefulOperatorsSuite extends StreamTest with StateStoreMetricsTest with BeforeAndAfter { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamSuite.scala index 62ef5824ed5..ef5b8a769fe 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamSuite.scala @@ -47,8 +47,10 @@ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.sources.StreamSourceProvider import org.apache.spark.sql.streaming.util.{BlockOnStopSourceProvider, StreamManualClock} import org.apache.spark.sql.types.{IntegerType, LongType, StructField, StructType} +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils +@SlowSQLTest class StreamSuite extends StreamTest { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingAggregationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingAggregationSuite.scala index 09a0d969459..61751133e33 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingAggregationSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingAggregationSuite.scala @@ -43,12 +43,14 @@ import org.apache.spark.sql.streaming.OutputMode._ import org.apache.spark.sql.streaming.util.{MockSourceProvider, StreamManualClock} import org.apache.spark.sql.types.{StructType, TimestampType} import 
org.apache.spark.storage.{BlockId, StorageLevel, TestBlockId} +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils object FailureSingleton { var firstTime = true } +@SlowSQLTest class StreamingAggregationSuite extends StateStoreMetricsTest with Assertions { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala index 0315e03d187..190a67a25d6 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingDeduplicationSuite.scala @@ -26,8 +26,10 @@ import org.apache.spark.sql.catalyst.streaming.InternalOutputModes._ import org.apache.spark.sql.execution.streaming.MemoryStream import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils +@SlowSQLTest class StreamingDeduplicationSuite extends StateStoreMetricsTest { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala index 40868f896f5..4d92e270539 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingJoinSuite.scala @@ -36,6 +36,7 @@ import org.apache.spark.sql.execution.streaming.{MemoryStream, StatefulOperatorS import org.apache.spark.sql.execution.streaming.state.{RocksDBStateStoreProvider, StateStore, StateStoreProviderId} import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils abstract class StreamingJoinSuite @@ -223,6 +224,7 @@ abstract class StreamingJoinSuite } } 
+@SlowSQLTest class StreamingInnerJoinSuite extends StreamingJoinSuite { import testImplicits._ @@ -776,6 +778,7 @@ class StreamingInnerJoinSuite extends StreamingJoinSuite { } +@SlowSQLTest class StreamingOuterJoinSuite extends StreamingJoinSuite { import testImplicits._ @@ -1416,6 +1419,7 @@ class StreamingOuterJoinSuite extends StreamingJoinSuite { } } +@SlowSQLTest class StreamingFullOuterJoinSuite extends StreamingJoinSuite { test("windowed full outer join") { @@ -1619,6 +1623,7 @@ class StreamingFullOuterJoinSuite extends StreamingJoinSuite { } } +@SlowSQLTest class StreamingLeftSemiJoinSuite extends StreamingJoinSuite { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala index 7b53b4c7858..e36b3428585 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryListenerSuite.scala @@ -35,8 +35,10 @@ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.streaming.StreamingQueryListener._ import org.apache.spark.sql.streaming.ui.StreamingQueryStatusListener import org.apache.spark.sql.streaming.util.StreamManualClock +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.JsonProtocol +@SlowSQLTest class StreamingQueryListenerSuite extends StreamTest with BeforeAndAfter { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryManagerSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryManagerSuite.scala index cc66ce85673..7deb0c69d12 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryManagerSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQueryManagerSuite.scala @@ -34,8 +34,10 @@ import 
org.apache.spark.sql.execution.datasources.v2.StreamingDataSourceV2Relati import org.apache.spark.sql.execution.streaming._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.streaming.util.BlockingSource +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils +@SlowSQLTest class StreamingQueryManagerSuite extends StreamTest { import AwaitTerminationTester._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala index 5c162835b12..b889ac18974 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingQuerySuite.scala @@ -50,7 +50,9 @@ import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.streaming.util.{BlockingSource, MockSourceProvider, StreamManualClock} import org.apache.spark.sql.types.StructType +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class StreamingQuerySuite extends StreamTest with BeforeAndAfter with Logging with MockitoSugar { import AwaitTerminationTester._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowDistributionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowDistributionSuite.scala index c252dc8f045..36c7459ce82 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowDistributionSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowDistributionSuite.scala @@ -28,8 +28,10 @@ import org.apache.spark.sql.execution.streaming.{MemoryStream, SessionWindowStat import org.apache.spark.sql.functions.{count, session_window} import org.apache.spark.sql.internal.SQLConf import org.apache.spark.sql.streaming.util.StatefulOpClusteredDistributionTestHelper +import 
org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils +@SlowSQLTest class StreamingSessionWindowDistributionSuite extends StreamTest with StatefulOpClusteredDistributionTestHelper with Logging { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowSuite.scala index 25b7506178d..12a4b7cf37a 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingSessionWindowSuite.scala @@ -29,7 +29,9 @@ import org.apache.spark.sql.execution.streaming.state.{HDFSBackedStateStoreProvi import org.apache.spark.sql.expressions.Aggregator import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class StreamingSessionWindowSuite extends StreamTest with BeforeAndAfter with Matchers with Logging { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingStateStoreFormatCompatibilitySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingStateStoreFormatCompatibilitySuite.scala index 1032d6c5b6f..4827d06d64d 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingStateStoreFormatCompatibilitySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/StreamingStateStoreFormatCompatibilitySuite.scala @@ -29,6 +29,7 @@ import org.apache.spark.sql.catalyst.streaming.InternalOutputModes.Complete import org.apache.spark.sql.execution.streaming.MemoryStream import org.apache.spark.sql.execution.streaming.state.{InvalidUnsafeRowException, StateSchemaNotCompatible} import org.apache.spark.sql.functions._ +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils /** @@ -39,6 +40,7 @@ import org.apache.spark.util.Utils * a new test for the issue, just like the test suite 
"SPARK-28067 changed the sum decimal unsafe * row format". */ +@SlowSQLTest class StreamingStateStoreFormatCompatibilitySuite extends StreamTest { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/TriggerAvailableNowSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/TriggerAvailableNowSuite.scala index cb4410d9da9..65deca22207 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/TriggerAvailableNowSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/TriggerAvailableNowSuite.scala @@ -26,7 +26,9 @@ import org.apache.spark.sql.connector.read.streaming import org.apache.spark.sql.connector.read.streaming.{ReadLimit, SupportsAdmissionControl} import org.apache.spark.sql.execution.streaming.{LongOffset, MemoryStream, Offset, SerializedOffset, Source, StreamingExecutionRelation} import org.apache.spark.sql.types.{LongType, StructType} +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class TriggerAvailableNowSuite extends FileStreamSourceTest { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousQueryStatusAndProgressSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousQueryStatusAndProgressSuite.scala index 59d6ac0af52..d7aa99c30aa 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousQueryStatusAndProgressSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousQueryStatusAndProgressSuite.scala @@ -20,7 +20,9 @@ package org.apache.spark.sql.streaming.continuous import org.apache.spark.sql.execution.streaming.StreamExecution import org.apache.spark.sql.execution.streaming.sources.ContinuousMemoryStream import org.apache.spark.sql.streaming.Trigger +import org.apache.spark.tags.SlowSQLTest +@SlowSQLTest class ContinuousQueryStatusAndProgressSuite extends ContinuousSuiteBase { test("StreamingQueryStatus - 
ContinuousExecution isDataAvailable and isTriggerActive " + "should be false") { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousSuite.scala index fc6b51dce79..44de7af0b4a 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/continuous/ContinuousSuite.scala @@ -29,6 +29,7 @@ import org.apache.spark.sql.functions._ import org.apache.spark.sql.internal.SQLConf.{CONTINUOUS_STREAMING_EPOCH_BACKLOG_QUEUE_SIZE, MIN_BATCHES_TO_RETAIN} import org.apache.spark.sql.streaming.{StreamTest, Trigger} import org.apache.spark.sql.test.TestSparkSession +import org.apache.spark.tags.SlowSQLTest class ContinuousSuiteBase extends StreamTest { // We need more than the default local[2] to be able to schedule all partitions simultaneously. @@ -89,6 +90,7 @@ class ContinuousSuiteBase extends StreamTest { override protected val defaultTrigger = Trigger.Continuous(100) } +@SlowSQLTest class ContinuousSuite extends ContinuousSuiteBase { import IntegratedUDFTestUtils._ import testImplicits._ @@ -297,6 +299,7 @@ class ContinuousSuite extends ContinuousSuiteBase { } } +@SlowSQLTest class ContinuousStressSuite extends ContinuousSuiteBase { import testImplicits._ @@ -372,6 +375,7 @@ class ContinuousStressSuite extends ContinuousSuiteBase { } } +@SlowSQLTest class ContinuousMetaSuite extends ContinuousSuiteBase { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala index 9906defa96e..558b8973da1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala +++ 
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/sources/StreamingDataSourceV2Suite.scala @@ -35,8 +35,10 @@ import org.apache.spark.sql.sources.{DataSourceRegister, StreamSinkProvider} import org.apache.spark.sql.streaming.{OutputMode, StreamingQuery, StreamTest, Trigger} import org.apache.spark.sql.types.StructType import org.apache.spark.sql.util.CaseInsensitiveStringMap +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils +@SlowSQLTest class FakeDataStream extends MicroBatchStream with ContinuousStream { override def deserializeOffset(json: String): Offset = RateStreamOffset(Map()) override def commit(end: Offset): Unit = {} @@ -269,6 +271,7 @@ object LastWriteOptions { } } +@SlowSQLTest class StreamingDataSourceV2Suite extends StreamTest { override def beforeAll(): Unit = { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala index 73ec5d89ff0..07a9ec4fdce 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamReaderWriterSuite.scala @@ -36,6 +36,7 @@ import org.apache.spark.sql.sources.{StreamSinkProvider, StreamSourceProvider} import org.apache.spark.sql.streaming.{OutputMode, StreamingQuery, StreamingQueryException, StreamTest} import org.apache.spark.sql.streaming.Trigger._ import org.apache.spark.sql.types._ +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils object LastOptions { @@ -108,6 +109,7 @@ class DefaultSource extends StreamSourceProvider with StreamSinkProvider { } } +@SlowSQLTest class DataStreamReaderWriterSuite extends StreamTest with BeforeAndAfter { import testImplicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamTableAPISuite.scala 
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamTableAPISuite.scala index 6bbf2239dbf..abe606ad9c1 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamTableAPISuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/test/DataStreamTableAPISuite.scala @@ -38,8 +38,10 @@ import org.apache.spark.sql.streaming.StreamTest import org.apache.spark.sql.streaming.sources.FakeScanBuilder import org.apache.spark.sql.types.StructType import org.apache.spark.sql.util.CaseInsensitiveStringMap +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.util.Utils +@SlowSQLTest class DataStreamTableAPISuite extends StreamTest with BeforeAndAfter { import testImplicits._ import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._ diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/StreamingQueryPageSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/StreamingQueryPageSuite.scala index 78ade6a1eef..0f390dde263 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/StreamingQueryPageSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/StreamingQueryPageSuite.scala @@ -29,8 +29,10 @@ import org.apache.spark.SparkConf import org.apache.spark.sql.execution.ui.StreamingQueryStatusStore import org.apache.spark.sql.streaming.StreamingQueryProgress import org.apache.spark.sql.test.SharedSparkSession +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.ui.SparkUI +@SlowSQLTest class StreamingQueryPageSuite extends SharedSparkSession with BeforeAndAfter { test("correctly display streaming query page") { diff --git a/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/UISeleniumSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/UISeleniumSuite.scala index c3de44c3ba1..ca36528db5c 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/UISeleniumSuite.scala +++ 
b/sql/core/src/test/scala/org/apache/spark/sql/streaming/ui/UISeleniumSuite.scala @@ -35,9 +35,11 @@ import org.apache.spark.sql.functions.{window => windowFn, _} import org.apache.spark.sql.internal.SQLConf.SHUFFLE_PARTITIONS import org.apache.spark.sql.internal.StaticSQLConf.ENABLED_STREAMING_UI_CUSTOM_METRIC_LIST import org.apache.spark.sql.streaming.{StreamingQueryException, Trigger} +import org.apache.spark.tags.SlowSQLTest import org.apache.spark.ui.SparkUICssErrorHandler import org.apache.spark.util.Utils +@SlowSQLTest class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers { implicit var webDriver: WebDriver = _ @@ -179,6 +181,7 @@ class UISeleniumSuite extends SparkFunSuite with WebBrowser with Matchers { } } +@SlowSQLTest class UISeleniumWithRocksDBBackendSuite extends UISeleniumSuite { private val storePath = Utils.createTempDir() --------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org