This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch branch-3.0
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/branch-3.0 by this push:
     new 6d8ee15  [MINOR][SQL] Remove an ignored test from JsonSuite
6d8ee15 is described below

commit 6d8ee156adcac8fd2a442ab70da750f950a83661
Author: Maxim Gekk <max.g...@gmail.com>
AuthorDate: Fri Mar 6 10:35:44 2020 +0900

    [MINOR][SQL] Remove an ignored test from JsonSuite
    
    ### What changes were proposed in this pull request?
    Remove the ignored and outdated test `Type conflict in primitive field values (Ignored)` from JsonSuite.
    
    ### Why are the changes needed?
    The test has not been maintained for a long time. It can be removed to reduce the size of JsonSuite and improve maintainability.
    
    ### Does this PR introduce any user-facing change?
    No
    
    ### How was this patch tested?
    By running the command `./build/sbt "test:testOnly *JsonV2Suite"`
    
    Closes #27795 from MaxGekk/remove-ignored-test-in-JsonSuite.
    
    Authored-by: Maxim Gekk <max.g...@gmail.com>
    Signed-off-by: HyukjinKwon <gurwls...@apache.org>
    (cherry picked from commit cf7c397ede05fd106697bd5cc8062f394623bf22)
    Signed-off-by: HyukjinKwon <gurwls...@apache.org>
---
 .../sql/execution/datasources/json/JsonSuite.scala | 53 ----------------------
 1 file changed, 53 deletions(-)

diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
index df0bca7..fb3328c 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
@@ -481,59 +481,6 @@ abstract class JsonSuite extends QueryTest with SharedSparkSession with TestJson
     )
   }
 
-  ignore("Type conflict in primitive field values (Ignored)") {
-    val jsonDF = spark.read.json(primitiveFieldValueTypeConflict)
-    jsonDF.createOrReplaceTempView("jsonTable")
-
-    // Right now, the analyzer does not promote strings in a boolean expression.
-    // Number and Boolean conflict: resolve the type as boolean in this query.
-    checkAnswer(
-      sql("select num_bool from jsonTable where NOT num_bool"),
-      Row(false)
-    )
-
-    checkAnswer(
-      sql("select str_bool from jsonTable where NOT str_bool"),
-      Row(false)
-    )
-
-    // Right now, the analyzer does not know that num_bool should be treated as a boolean.
-    // Number and Boolean conflict: resolve the type as boolean in this query.
-    checkAnswer(
-      sql("select num_bool from jsonTable where num_bool"),
-      Row(true)
-    )
-
-    checkAnswer(
-      sql("select str_bool from jsonTable where str_bool"),
-      Row(false)
-    )
-
-    // The plan of the following DSL is
-    // Project [(CAST(num_str#65:4, DoubleType) + 1.2) AS num#78]
-    //  Filter (CAST(CAST(num_str#65:4, DoubleType), DecimalType) > 92233720368547758060)
-    //    ExistingRdd [num_bool#61,num_num_1#62L,num_num_2#63,num_num_3#64,num_str#65,str_bool#66]
-    // We should directly cast num_str to DecimalType and also need to do the right type promotion
-    // in the Project.
-    checkAnswer(
-      jsonDF.
-        where('num_str >= BigDecimal("92233720368547758060")).
-        select(('num_str + 1.2).as("num")),
-      Row(new java.math.BigDecimal("92233720368547758071.2").doubleValue())
-    )
-
-    // The following test will fail. The type of num_str is StringType.
-    // So, to evaluate num_str + 1.2, we first need to use Cast to convert the type.
-    // In our test data, one value of num_str is 13.1.
-    // The result of (CAST(num_str#65:4, DoubleType) + 1.2) for this value is 14.299999999999999,
-    // which is not 14.3.
-    // Number and String conflict: resolve the type as number in this query.
-    checkAnswer(
-      sql("select num_str + 1.2 from jsonTable where num_str > 13"),
-      Row(BigDecimal("14.3")) :: Row(BigDecimal("92233720368547758071.2")) :: 
Nil
-    )
-  }
-
   test("Type conflict in complex field values") {
     val jsonDF = spark.read.json(complexFieldValueTypeConflict)
 


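For context on the floating-point remark in the removed test above: the following standalone Scala sketch (illustrative only, not part of this commit; the object name is made up) shows why "13.1" parsed as a Double plus 1.2 prints as 14.299999999999999 while exact decimal arithmetic yields 14.3, which is why the removed assertion compared against BigDecimal("14.3").

    // Illustrative sketch: Double vs. BigDecimal arithmetic for the value cited
    // in the removed test's comments ("13.1" + 1.2).
    object DoublePromotionSketch extends App {
      val numStr = "13.1"

      // Double arithmetic: 13.1 and 1.2 are not exactly representable in binary
      // floating point, so the sum prints with rounding noise.
      println(numStr.toDouble + 1.2)                    // 14.299999999999999

      // Exact decimal arithmetic matches the expected literal.
      println(BigDecimal(numStr) + BigDecimal("1.2"))   // 14.3
    }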
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
