Repository: spark
Updated Branches:
  refs/heads/master 70b2bf717 -> bec0a9217


[SPARK-18654][SQL] Remove unreachable patterns in makeRootConverter

## What changes were proposed in this pull request?

`makeRootConverter` is only ever called with a `StructType` value. Narrowing the 
parameter type from `DataType` to `StructType` lets us remove pattern matches 
that are never hit outside of the test suite.
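
For context, a minimal sketch of the before/after shape (the converter bodies and names below are illustrative, not the real `JacksonParser` internals):

```scala
import org.apache.spark.sql.types.{DataType, StructType}

object RootConverterSketch {
  // Stand-in for the real ValueConverter function type.
  type ValueConverter = String => Any

  // Before (sketched): a general DataType parameter forces a match whose
  // non-StructType branches are dead code at every production call site.
  def makeRootConverterBefore(dataType: DataType): ValueConverter = dataType match {
    case st: StructType => json => s"row over ${st.fieldNames.mkString(", ")}"
    case _              => json => json // never reached outside the test suite
  }

  // After (sketched): the signature states the invariant directly,
  // so the unreachable branches simply disappear.
  def makeRootConverterAfter(st: StructType): ValueConverter =
    json => s"row over ${st.fieldNames.mkString(", ")}"
}
```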

## How was this patch tested?

The existing tests.

Author: Nathan Howell <nhow...@godaddy.com>

Closes #16084 from NathanHowell/SPARK-18654.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/bec0a921
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/bec0a921
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/bec0a921

Branch: refs/heads/master
Commit: bec0a9217b896a596c0a0919888da34589fc720d
Parents: 70b2bf7
Author: Nathan Howell <nhow...@godaddy.com>
Authored: Wed Dec 7 16:52:05 2016 -0800
Committer: Reynold Xin <r...@databricks.com>
Committed: Wed Dec 7 16:52:05 2016 -0800

----------------------------------------------------------------------
 .../spark/sql/catalyst/json/JacksonParser.scala | 56 ++++++++------------
 .../execution/datasources/json/JsonSuite.scala  |  2 +-
 2 files changed, 22 insertions(+), 36 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/bec0a921/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
index e476cb1..03e27ba 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/json/JacksonParser.scala
@@ -119,47 +119,33 @@ class JacksonParser(
    * to a value according to a desired schema. This is a wrapper for the method
    * `makeConverter()` to handle a row wrapped with an array.
    */
-  def makeRootConverter(dataType: DataType): ValueConverter = dataType match {
-    case st: StructType =>
-      val elementConverter = makeConverter(st)
-      val fieldConverters = st.map(_.dataType).map(makeConverter)
-      (parser: JsonParser) => parseJsonToken(parser, dataType) {
-        case START_OBJECT => convertObject(parser, st, fieldConverters)
-          // SPARK-3308: support reading top level JSON arrays and take every element
-          // in such an array as a row
-          //
-          // For example, we support, the JSON data as below:
-          //
-          // [{"a":"str_a_1"}]
-          // [{"a":"str_a_2"}, {"b":"str_b_3"}]
-          //
-          // resulting in:
-          //
-          // List([str_a_1,null])
-          // List([str_a_2,null], [null,str_b_3])
-          //
-        case START_ARRAY => convertArray(parser, elementConverter)
-      }
-
-    case ArrayType(st: StructType, _) =>
-      val elementConverter = makeConverter(st)
-      val fieldConverters = st.map(_.dataType).map(makeConverter)
-      (parser: JsonParser) => parseJsonToken(parser, dataType) {
-        // the business end of SPARK-3308:
-        // when an object is found but an array is requested just wrap it in a list.
-        // This is being wrapped in `JacksonParser.parse`.
-        case START_OBJECT => convertObject(parser, st, fieldConverters)
-        case START_ARRAY => convertArray(parser, elementConverter)
-      }
-
-    case _ => makeConverter(dataType)
+  private def makeRootConverter(st: StructType): ValueConverter = {
+    val elementConverter = makeConverter(st)
+    val fieldConverters = st.map(_.dataType).map(makeConverter)
+    (parser: JsonParser) => parseJsonToken(parser, st) {
+      case START_OBJECT => convertObject(parser, st, fieldConverters)
+          // SPARK-3308: support reading top level JSON arrays and take every element
+        // in such an array as a row
+        //
+        // For example, we support, the JSON data as below:
+        //
+        // [{"a":"str_a_1"}]
+        // [{"a":"str_a_2"}, {"b":"str_b_3"}]
+        //
+        // resulting in:
+        //
+        // List([str_a_1,null])
+        // List([str_a_2,null], [null,str_b_3])
+        //
+      case START_ARRAY => convertArray(parser, elementConverter)
+    }
   }
 
   /**
   * Create a converter which converts the JSON documents held by the `JsonParser`
    * to a value according to a desired schema.
    */
-  private def makeConverter(dataType: DataType): ValueConverter = dataType match {
+  private[sql] def makeConverter(dataType: DataType): ValueConverter = dataType match {
     case BooleanType =>
       (parser: JsonParser) => parseJsonToken(parser, dataType) {
         case VALUE_TRUE => true

http://git-wip-us.apache.org/repos/asf/spark/blob/bec0a921/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
index 598e44e..d4e0c0e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/datasources/json/JsonSuite.scala
@@ -68,7 +68,7 @@ class JsonSuite extends QueryTest with SharedSQLContext with TestJsonData {
 
      Utils.tryWithResource(factory.createParser(writer.toString)) { jsonParser =>
         jsonParser.nextToken()
-        val converter = parser.makeRootConverter(dataType)
+        val converter = parser.makeConverter(dataType)
         converter.apply(jsonParser)
       }
     }


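For reference, the streaming pattern used by the test above boils down to: create a Jackson parser over a JSON literal, advance to the first token, then hand the parser to a converter. A minimal sketch (the literal and printed token are illustrative):

```scala
import com.fasterxml.jackson.core.JsonFactory

object JacksonStreamingSketch {
  def main(args: Array[String]): Unit = {
    val factory = new JsonFactory()
    val jsonParser = factory.createParser("""{"a": 1}""")
    try {
      // Position on the first token (START_OBJECT here), as the suite does
      // before applying a converter to the parser.
      jsonParser.nextToken()
      println(jsonParser.getCurrentToken)
    } finally {
      jsonParser.close()
    }
  }
}
```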