This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 06d8cbe07349 [SPARK-45847][SQL][TESTS] CliSuite flakiness due to 
non-sequential guarantee for stdout&stderr
06d8cbe07349 is described below

commit 06d8cbe073499ff16bca3165e2de1192daad3984
Author: Kent Yao <y...@apache.org>
AuthorDate: Thu Nov 9 16:23:38 2023 +0800

    [SPARK-45847][SQL][TESTS] CliSuite flakiness due to non-sequential 
guarantee for stdout&stderr
    
    ### What changes were proposed in this pull request?
    
    In CliSuite, this PR adds a retry for tests that write errors to STDERR.
    
    ### Why are the changes needed?
    
    To fix flaky tests such as those below
    
https://github.com/chenhao-db/apache-spark/actions/runs/6791437199/job/18463313766
    
https://github.com/dongjoon-hyun/spark/actions/runs/6753670527/job/18361206900
    
    ```sql
    [info]   Spark master: local, Application Id: local-1699402393189
    [info]   spark-sql> /* SELECT /*+ HINT() 4; */;
    [info]
    [info]   [PARSE_SYNTAX_ERROR] Syntax error at or near ';'. SQLSTATE: 42601 
(line 1, pos 26)
    [info]
    [info]   == SQL ==
    [info]   /* SELECT /*+ HINT() 4; */;
    [info]   --------------------------^^^
    [info]
    [info]   spark-sql> /* SELECT /*+ HINT() 4; */ SELECT 1;
    [info]   1
    [info]   Time taken: 1.499 seconds, Fetched 1 row(s)
    [info]
    [info]   [UNCLOSED_BRACKETED_COMMENT] Found an unclosed bracketed comment. 
Please, append */ at the end of the comment. SQLSTATE: 42601
    [info]   == SQL ==
    [info]   /* Here is a unclosed bracketed comment SELECT 1;
    [info]   spark-sql> /* Here is a unclosed bracketed comment SELECT 1;
    [info]   spark-sql> /* SELECT /*+ HINT() */ 4; */;
    [info]   spark-sql>
    ```
    
    As you can see in the fragment above, the query on the 3rd line from the 
bottom, which came from STDOUT, was printed later than its error output, which 
came from STDERR.
    
    In this scenario, the error output would not match anything and would 
simply go unnoticed. Eventually, the test timed out and failed.
    
    ### Does this PR introduce _any_ user-facing change?
    
    no
    
    ### How was this patch tested?
    
    existing tests and CI
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    no
    
    Closes #43725 from yaooqinn/SPARK-45847.
    
    Authored-by: Kent Yao <y...@apache.org>
    Signed-off-by: Kent Yao <y...@apache.org>
---
 .../apache/spark/sql/hive/thriftserver/CliSuite.scala | 19 +++++++++----------
 1 file changed, 9 insertions(+), 10 deletions(-)

diff --git 
a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
 
b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
index 5391965ded2e..4f0d4dff566c 100644
--- 
a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
+++ 
b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/CliSuite.scala
@@ -383,7 +383,7 @@ class CliSuite extends SparkFunSuite {
     )
   }
 
-  test("SPARK-11188 Analysis error reporting") {
+  testRetry("SPARK-11188 Analysis error reporting") {
     runCliWithin(timeout = 2.minute,
       errorResponses = Seq("AnalysisException"))(
       "select * from nonexistent_table;" -> "nonexistent_table"
@@ -551,7 +551,7 @@ class CliSuite extends SparkFunSuite {
     )
   }
 
-  test("SparkException with root cause will be printStacktrace") {
+  testRetry("SparkException with root cause will be printStacktrace") {
     // If it is not in silent mode, will print the stacktrace
     runCliWithin(
       1.minute,
@@ -575,8 +575,8 @@ class CliSuite extends SparkFunSuite {
     runCliWithin(1.minute)("SELECT MAKE_DATE(-44, 3, 15);" -> "-0044-03-15")
   }
 
-  test("SPARK-33100: Ignore a semicolon inside a bracketed comment in 
spark-sql") {
-    runCliWithin(4.minute)(
+  testRetry("SPARK-33100: Ignore a semicolon inside a bracketed comment in 
spark-sql") {
+    runCliWithin(1.minute)(
       "/* SELECT 'test';*/ SELECT 'test';" -> "test",
       ";;/* SELECT 'test';*/ SELECT 'test';" -> "test",
       "/* SELECT 'test';*/;; SELECT 'test';" -> "test",
@@ -623,8 +623,8 @@ class CliSuite extends SparkFunSuite {
     )
   }
 
-  test("SPARK-37555: spark-sql should pass last unclosed comment to backend") {
-    runCliWithin(5.minute)(
+  testRetry("SPARK-37555: spark-sql should pass last unclosed comment to 
backend") {
+    runCliWithin(1.minute)(
       // Only unclosed comment.
       "/* SELECT /*+ HINT() 4; */;".stripMargin -> "Syntax error at or near 
';'",
       // Unclosed nested bracketed comment.
@@ -637,7 +637,7 @@ class CliSuite extends SparkFunSuite {
     )
   }
 
-  test("SPARK-37694: delete [jar|file|archive] shall use spark sql processor") 
{
+  testRetry("SPARK-37694: delete [jar|file|archive] shall use spark sql 
processor") {
     runCliWithin(2.minute, errorResponses = Seq("ParseException"))(
       "delete jar dummy.jar;" ->
         "Syntax error at or near 'jar': missing 'FROM'. SQLSTATE: 42601 (line 
1, pos 7)")
@@ -679,7 +679,7 @@ class CliSuite extends SparkFunSuite {
     SparkSQLEnv.stop()
   }
 
-  test("SPARK-39068: support in-memory catalog and running concurrently") {
+  testRetry("SPARK-39068: support in-memory catalog and running concurrently") 
{
     val extraConf = Seq("-c", 
s"${StaticSQLConf.CATALOG_IMPLEMENTATION.key}=in-memory")
     val cd = new CountDownLatch(2)
     def t: Thread = new Thread {
@@ -699,7 +699,7 @@ class CliSuite extends SparkFunSuite {
   }
 
   // scalastyle:off line.size.limit
-  test("formats of error messages") {
+  testRetry("formats of error messages") {
     def check(format: ErrorMessageFormat.Value, errorMessage: String, silent: 
Boolean): Unit = {
       val expected = errorMessage.split(System.lineSeparator()).map("" -> _)
       runCliWithin(
@@ -811,7 +811,6 @@ class CliSuite extends SparkFunSuite {
       
s"spark.sql.catalog.$catalogName.url=jdbc:derby:memory:$catalogName;create=true"
     val catalogDriver =
       
s"spark.sql.catalog.$catalogName.driver=org.apache.derby.jdbc.AutoloadedDriver"
-    val database = s"-database $catalogName.SYS"
     val catalogConfigs =
       Seq(catalogImpl, catalogDriver, catalogUrl, 
"spark.sql.catalogImplementation=in-memory")
         .flatMap(Seq("--conf", _))


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to