HyukjinKwon commented on code in PR #51236:
URL: https://github.com/apache/spark/pull/51236#discussion_r2176493277


##########
sql/core/src/test/scala/org/apache/spark/sql/DataFrameAggregateSuite.scala:
##########
@@ -2567,6 +2571,337 @@ class DataFrameAggregateSuite extends QueryTest
       checkAnswer(df, Row(1.001d, 1, 1) :: Row(6.002d, 1, 1) :: Nil)
     }
   }
+
+  test("SPARK-52515: test of 1 parameter") {
+    val res = sql(
+      "SELECT approx_top_k(expr) FROM VALUES (0), (0), (1), (1), (2), (3), 
(4), (4) AS tab(expr);"
+    )
+    checkAnswer(res, Row(Seq(Row(0, 2), Row(4, 2), Row(1, 2), Row(2, 1), 
Row(3, 1))))
+  }
+
+  test("SPARK-52515: test of 2 parameter") {
+    val res = sql(
+      "SELECT approx_top_k(expr, 2) " +
+        "FROM VALUES 'a', 'b', 'c', 'c', 'c', 'c', 'd', 'd' AS tab(expr);")
+    checkAnswer(res, Row(Seq(Row("c", 4), Row("d", 2))))
+  }
+
+  test("SPARK-52515: test of 3 parameter") {
+    val res = sql(
+      "SELECT approx_top_k(expr, 10, 100) FROM VALUES (0), (1), (1), (2), (2), 
(2) AS tab(expr);"
+    )
+    checkAnswer(res, Row(Seq(Row(2, 3), Row(1, 2), Row(0, 1))))
+  }
+
+  test("SPARK-52515: test of Integer type") {
+    val res = sql(
+      "SELECT approx_top_k(expr) FROM VALUES (0), (0), (1), (1), (2), (3), 
(4), (4) AS tab(expr);"
+    )
+    checkAnswer(res, Row(Seq(Row(0, 2), Row(4, 2), Row(1, 2), Row(2, 1), 
Row(3, 1))))
+  }
+
+  test("SPARK-52515: test of String type") {
+    val res = sql(
+      "SELECT approx_top_k(expr, 2)" +
+        "FROM VALUES 'a', 'b', 'c', 'c', 'c', 'c', 'd', 'd' AS tab(expr);")
+    checkAnswer(res, Row(Seq(Row("c", 4), Row("d", 2))))
+  }
+
+  test("SPARK-52515: test of Boolean type") {
+    Seq(true, true, false, true, true, false, 
false).toDF("expr").createOrReplaceTempView("t_bool")
+    val res = sql("SELECT approx_top_k(expr, 1) FROM t_bool;")
+    checkAnswer(res, Row(Seq(Row(true, 4))))
+  }
+
+  test("SPARK-52515: test of Byte type") {
+    val res = sql(
+      "SELECT approx_top_k(expr, 2) " +
+        "FROM VALUES cast(0 AS BYTE), cast(0 AS BYTE), cast(1 AS BYTE), cast(1 
AS BYTE), " +
+        "cast(2 AS BYTE), cast(3 AS BYTE), cast(4 AS BYTE), cast(4 AS BYTE) AS 
tab(expr);")
+    checkAnswer(res, Row(Seq(Row(0, 2), Row(4, 2))))
+  }
+
+  test("SPARK-52515: test of Short type") {
+    val res = sql(
+      "SELECT approx_top_k(expr, 2) " +
+        "FROM VALUES cast(0 AS SHORT), cast(0 AS SHORT), cast(1 AS SHORT), 
cast(1 AS SHORT), " +
+        "cast(2 AS SHORT), cast(3 AS SHORT), cast(4 AS SHORT), cast(4 AS 
SHORT) AS tab(expr);")
+    checkAnswer(res, Row(Seq(Row(0, 2), Row(4, 2))))
+  }
+
+  test("SPARK-52515: test of Long type") {
+    val res = sql(
+      "SELECT approx_top_k(expr, 2) " +
+        "FROM VALUES cast(0 AS LONG), cast(0 AS LONG), cast(1 AS LONG), cast(1 
AS LONG), " +
+        "cast(2 AS LONG), cast(3 AS LONG), cast(4 AS LONG), cast(4 AS LONG) AS 
tab(expr);")
+    checkAnswer(res, Row(Seq(Row(0, 2), Row(4, 2))))
+  }
+
+  test("SPARK-52515: test of Float type") {
+    val res = sql(
+      "SELECT approx_top_k(expr) " +
+        "FROM VALUES cast(0.0 AS FLOAT), cast(0.0 AS FLOAT), " +
+        "cast(1.0 AS FLOAT), cast(1.0 AS FLOAT), " +
+        "cast(2.0 AS FLOAT), cast(3.0 AS FLOAT), " +
+        "cast(4.0 AS FLOAT), cast(4.0 AS FLOAT) AS tab(expr);")
+    checkAnswer(res, Row(Seq(Row(0.0, 2), Row(1.0, 2), Row(4.0, 2), Row(2.0, 
1), Row(3.0, 1))))
+  }
+
+  test("SPARK-52515: test of Double type") {
+    val res = sql(
+      "SELECT approx_top_k(expr, 2) " +
+        "FROM VALUES cast(0.0 AS DOUBLE), cast(0.0 AS DOUBLE), " +
+        "cast(1.0 AS DOUBLE), cast(1.0 AS DOUBLE), " +
+        "cast(2.0 AS DOUBLE), cast(3.0 AS DOUBLE), " +
+        "cast(4.0 AS DOUBLE), cast(4.0 AS DOUBLE) AS tab(expr);")
+    checkAnswer(res, Row(Seq(Row(0.0, 2), Row(4.0, 2))))
+  }
+
+  test("SPARK-52515: test of Date type") {
+    val res = sql(
+      "SELECT approx_top_k(expr, 2) " +
+        "FROM VALUES cast('2023-01-01' AS DATE), cast('2023-01-01' AS DATE), " 
+
+        "cast('2023-01-02' AS DATE), cast('2023-01-02' AS DATE), " +
+        "cast('2023-01-03' AS DATE), cast('2023-01-04' AS DATE), " +
+        "cast('2023-01-05' AS DATE), cast('2023-01-05' AS DATE) AS tab(expr);")
+    checkAnswer(
+      res,
+      Row(Seq(Row(Date.valueOf("2023-01-02"), 2), 
Row(Date.valueOf("2023-01-01"), 2))))
+  }
+
+  test("SPARK-52515: test of Timestamp type") {
+    val res = sql(
+      "SELECT approx_top_k(expr, 2) " +
+        "FROM VALUES cast('2023-01-01 00:00:00' AS TIMESTAMP), " +
+        "cast('2023-01-01 00:00:00' AS TIMESTAMP), " +
+        "cast('2023-01-02 00:00:00' AS TIMESTAMP), " +
+        "cast('2023-01-02 00:00:00' AS TIMESTAMP), " +
+        "cast('2023-01-03 00:00:00' AS TIMESTAMP), " +
+        "cast('2023-01-04 00:00:00' AS TIMESTAMP), " +
+        "cast('2023-01-05 00:00:00' AS TIMESTAMP), " +
+        "cast('2023-01-05 00:00:00' AS TIMESTAMP) AS tab(expr);")
+    checkAnswer(
+      res,
+      Row(Seq(Row(Timestamp.valueOf("2023-01-02 00:00:00"), 2),
+        Row(Timestamp.valueOf("2023-01-05 00:00:00"), 2))))
+  }
+
+  test("SPARK-52515: test of Timestamp_ntz type") {
+    val res = sql(
+      "SELECT approx_top_k(expr, 2) " +
+        "FROM VALUES TIMESTAMP_NTZ'2023-01-01 00:00:00', " +
+        "TIMESTAMP_NTZ'2023-01-01 00:00:00', " +
+        "TIMESTAMP_NTZ'2023-01-02 00:00:00', " +
+        "TIMESTAMP_NTZ'2023-01-02 00:00:00', " +
+        "TIMESTAMP_NTZ'2023-01-03 00:00:00', " +
+        "TIMESTAMP_NTZ'2023-01-04 00:00:00', " +
+        "TIMESTAMP_NTZ'2023-01-05 00:00:00', " +
+        "TIMESTAMP_NTZ'2023-01-05 00:00:00' AS tab(expr);")
+    checkAnswer(
+      res,
+      Row(Seq(Row(LocalDateTime.of(2023, 1, 5, 0, 0), 2),
+        Row(LocalDateTime.of(2023, 1, 1, 0, 0), 2))))
+  }
+
+  test("SPARK-52515: test of Decimal type") {
+    val res = sql(
+      "SELECT approx_top_k(expr, 2) AS top_k_result " +
+        "FROM VALUES (0.0), (0.0), (0.0) ,(1.0), (1.0), (2.0), (3.0), (4.0) AS 
tab(expr);")
+    checkAnswer(
+      res,
+      Row(Seq(Row(new java.math.BigDecimal("0.0"), 3), Row(new 
java.math.BigDecimal("1.0"), 2))))
+  }
+
+  test("SPARK-52515: test of Decimal(4, 1) type") {
+    val res = sql(
+      "SELECT approx_top_k(expr, 2) AS top_k_result " +
+        "FROM VALUES CAST(0.0 AS DECIMAL(4, 1)), CAST(0.0 AS DECIMAL(4, 1)), " 
+
+        "CAST(0.0 AS DECIMAL(4, 1)), CAST(1.0 AS DECIMAL(4, 1)), " +
+        "CAST(1.0 AS DECIMAL(4, 1)), CAST(2.0 AS DECIMAL(4, 1)), " +
+        "CAST(3.0 AS DECIMAL(4, 1)), CAST(4.0 AS DECIMAL(4, 1)) AS tab(expr);")
+    checkAnswer(
+      res,
+      Row(Seq(Row(new java.math.BigDecimal("0.0"), 3), Row(new 
java.math.BigDecimal("1.0"), 2))))
+  }
+
+  test("SPARK-52515: test of Decimal(10, 2) type") {
+    val res = sql(
+      "SELECT approx_top_k(expr, 2) AS top_k_result " +
+        "FROM VALUES CAST(0.0 AS DECIMAL(10, 2)), CAST(0.0 AS DECIMAL(10, 2)), 
" +
+        "CAST(0.0 AS DECIMAL(10, 2)), CAST(1.0 AS DECIMAL(10, 2)), " +
+        "CAST(1.0 AS DECIMAL(10, 2)), CAST(2.0 AS DECIMAL(10, 2)), " +
+        "CAST(3.0 AS DECIMAL(10, 2)), CAST(4.0 AS DECIMAL(10, 2)) AS 
tab(expr);")
+    checkAnswer(
+      res,
+      Row(Seq(Row(new java.math.BigDecimal("0.00"), 3), Row(new 
java.math.BigDecimal("1.00"), 2))))
+  }
+
+  test("SPARK-52515: test of Decimal(20, 3) type") {
+    val res = sql(
+      "SELECT approx_top_k(expr, 2) AS top_k_result " +
+        "FROM VALUES CAST(0.0 AS DECIMAL(20, 3)), CAST(0.0 AS DECIMAL(20, 3)), 
" +
+        "CAST(0.0 AS DECIMAL(20, 3)), CAST(1.0 AS DECIMAL(20, 3)), " +
+        "CAST(1.0 AS DECIMAL(20, 3)), CAST(2.0 AS DECIMAL(20, 3)), " +
+        "CAST(3.0 AS DECIMAL(20, 3)), CAST(4.0 AS DECIMAL(20, 3)) AS 
tab(expr);")
+    checkAnswer(
+      res,
+      Row(Seq(Row(new java.math.BigDecimal("0.000"), 3),
+        Row(new java.math.BigDecimal("1.000"), 2))))
+  }
+
+  test("SPARK-52515: invalid k value") {
+    checkError(
+      exception = intercept[SparkRuntimeException] {
+        sql("SELECT approx_top_k(expr, 0) FROM VALUES (0), (1), (2) AS 
tab(expr);").collect()
+      },
+      condition = "APPROX_TOP_K_NON_POSITIVE_ARG",
+      parameters = Map("argName" -> "`k`", "argValue" -> "0")
+    )
+  }
+
+  test("SPARK-52515: invalid k value > Int.MaxValue") {

Review Comment:
   It seems this test fails when ANSI mode is disabled:
   
   ```
   2025-07-01T01:59:59.7757939Z [info] - 
SPARK-52515: invalid k value > Int.MaxValue *** FAILED *** (16 
milliseconds)
   2025-07-01T01:59:59.7859149Z [info]   Expected 
exception org.apache.spark.SparkArithmeticException to be thrown, but 
org.apache.spark.SparkRuntimeException was thrown 
(DataFrameAggregateSuite.scala:2765)
   2025-07-01T01:59:59.7867839Z [info]   
org.scalatest.exceptions.TestFailedException:
   2025-07-01T01:59:59.7869104Z [info]   at 
org.scalatest.Assertions.newAssertionFailedException(Assertions.scala:472)
   2025-07-01T01:59:59.7870561Z [info]   at 
org.scalatest.Assertions.newAssertionFailedException$(Assertions.scala:471)
   2025-07-01T01:59:59.7872100Z [info]   at 
org.scalatest.funsuite.AnyFunSuite.newAssertionFailedException(AnyFunSuite.scala:1564)
   2025-07-01T01:59:59.7873453Z [info]   at 
org.scalatest.Assertions.intercept(Assertions.scala:756)
   2025-07-01T01:59:59.7874561Z [info]   at 
org.scalatest.Assertions.intercept$(Assertions.scala:746)
   2025-07-01T01:59:59.7875909Z [info]   at 
org.scalatest.funsuite.AnyFunSuite.intercept(AnyFunSuite.scala:1564)
   2025-07-01T01:59:59.7877387Z [info]   at 
org.apache.spark.sql.DataFrameAggregateSuite.$anonfun$new$426(DataFrameAggregateSuite.scala:2765)
   2025-07-01T01:59:59.7878822Z [info]   at 
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.scala:18)
   2025-07-01T01:59:59.7880044Z [info]   at 
org.scalatest.enablers.Timed$$anon$1.timeoutAfter(Timed.scala:127)
   2025-07-01T01:59:59.7881272Z [info]   at 
org.scalatest.concurrent.TimeLimits$.failAfterImpl(TimeLimits.scala:282)
   2025-07-01T01:59:59.7882562Z [info]   at 
org.scalatest.concurrent.TimeLimits.failAfter(TimeLimits.scala:231)
   2025-07-01T01:59:59.7883838Z [info]   at 
org.scalatest.concurrent.TimeLimits.failAfter$(TimeLimits.scala:230)
   2025-07-01T01:59:59.7885185Z [info]   at 
org.apache.spark.SparkFunSuite.failAfter(SparkFunSuite.scala:69)
   2025-07-01T01:59:59.7886369Z [info]   at 
org.apache.spark.SparkFunSuite.$anonfun$test$2(SparkFunSuite.scala:155)
   2025-07-01T01:59:59.7887697Z [info]   at 
org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
   2025-07-01T01:59:59.7888677Z [info]   at 
org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
   2025-07-01T01:59:59.7889695Z [info]   at 
org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
   2025-07-01T01:59:59.7890674Z [info]   at 
org.scalatest.Transformer.apply(Transformer.scala:22)
   2025-07-01T01:59:59.7891968Z [info]   at 
org.scalatest.Transformer.apply(Transformer.scala:20)
   2025-07-01T01:59:59.7893142Z [info]   at 
org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:226)
   2025-07-01T01:59:59.7894368Z [info]   at 
org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:227)
   2025-07-01T01:59:59.7895975Z [info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:224)
   2025-07-01T01:59:59.7897405Z [info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:236)
   2025-07-01T01:59:59.7898605Z [info]   at 
org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
   2025-07-01T01:59:59.7899782Z [info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:236)
   2025-07-01T01:59:59.7901114Z [info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:218)
   2025-07-01T01:59:59.7902646Z [info]   at 
org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:69)
   2025-07-01T01:59:59.7904126Z [info]   at 
org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
   2025-07-01T01:59:59.7905637Z [info]   at 
org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
   2025-07-01T01:59:59.7907382Z [info]   at 
org.apache.spark.sql.SingleLevelAggregateHashMapSuite.org$scalatest$BeforeAndAfter$$super$runTest(AggregateHashMapSuite.scala:27)
   2025-07-01T01:59:59.7909019Z [info]   at 
org.scalatest.BeforeAndAfter.runTest(BeforeAndAfter.scala:213)
   2025-07-01T01:59:59.7910210Z [info]   at 
org.scalatest.BeforeAndAfter.runTest$(BeforeAndAfter.scala:203)
   2025-07-01T01:59:59.7911676Z [info]   at 
org.apache.spark.sql.SingleLevelAggregateHashMapSuite.runTest(AggregateHashMapSuite.scala:27)
   2025-07-01T01:59:59.7913225Z [info]   at 
org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:269)
   2025-07-01T01:59:59.7914568Z [info]   at 
org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
   2025-07-01T01:59:59.7946798Z [info]   at 
scala.collection.immutable.List.foreach(List.scala:334)
   2025-07-01T01:59:59.7947966Z [info]   at 
org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
   2025-07-01T01:59:59.7949126Z [info]   at 
org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
   ```



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to