hopefulnick commented on code in PR #42398: URL: https://github.com/apache/spark/pull/42398#discussion_r1387425067
########## sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala: ########## @@ -158,6 +158,69 @@ class SQLQuerySuite extends QueryTest with SharedSparkSession with AdaptiveSpark } } + test("SPARK-42746: listagg function") { + withTempView("df", "df2") { + Seq(("a", "b"), ("a", "c"), ("b", "c"), ("b", "d"), (null, null)).toDF("a", "b") + .createOrReplaceTempView("df") + checkAnswer( + sql("select listagg(b) from df group by a"), + Row("") :: Row("b,c") :: Row("c,d") :: Nil) + + checkAnswer( + sql("select listagg(b) from df where 1 != 1"), + Row("") :: Nil) + + checkAnswer( + sql("select listagg(b, '|') from df group by a"), + Row("b|c") :: Row("c|d") :: Row("") :: Nil) + + checkAnswer( + sql("SELECT LISTAGG(a) FROM df"), + Row("a,a,b,b") :: Nil) + + checkAnswer( + sql("SELECT LISTAGG(DISTINCT a) FROM df"), + Row("a,b") :: Nil) + + checkAnswer( + sql("SELECT LISTAGG(a) WITHIN GROUP (ORDER BY a) FROM df"), + Row("a,a,b,b") :: Nil) + + checkAnswer( + sql("SELECT LISTAGG(a) WITHIN GROUP (ORDER BY a DESC) FROM df"), + Row("b,b,a,a") :: Nil) + + checkAnswer( + sql("SELECT LISTAGG(a) WITHIN GROUP (ORDER BY a DESC) " + + "OVER (PARTITION BY b) FROM df"), + Row("a") :: Row("b,a") :: Row("b,a") :: Row("b") :: Row("") :: Nil) + + checkAnswer( + sql("SELECT LISTAGG(a) WITHIN GROUP (ORDER BY b) FROM df"), + Row("a,a,b,b") :: Nil) + + checkAnswer( + sql("SELECT LISTAGG(a) WITHIN GROUP (ORDER BY b DESC) FROM df"), Review Comment: when specifying the custom separator, like ',', it will get "b','a','b,','a", not the expected result "b,a,b,a" -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. 
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For queries about this service, please contact Infrastructure at: us...@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org For additional commands, e-mail: reviews-h...@spark.apache.org