Repository: spark
Updated Branches:
  refs/heads/branch-2.0 741801921 -> 23e1ab9c7


[SPARK-16528][SQL] Fix NPE problem in HiveClientImpl

## What changes were proposed in this pull request?

There are some calls to methods or fields (getParameters, properties) which are 
then passed to Java/Scala collection converters. Unfortunately those fields can 
be null in some cases, and the conversion then throws an NPE. We fix it by 
wrapping calls to those fields and methods with Option and then doing the 
conversion.

## How was this patch tested?

Manually tested with a custom Hive metastore.

Author: Jacek Lewandowski <lewandowski.ja...@gmail.com>

Closes #14200 from jacek-lewandowski/SPARK-16528.

(cherry picked from commit 31ca741aef9dd138529e064785c8e58b86140ff5)
Signed-off-by: Reynold Xin <r...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/23e1ab9c
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/23e1ab9c
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/23e1ab9c

Branch: refs/heads/branch-2.0
Commit: 23e1ab9c7d56946647cb2081bd384d174bce1882
Parents: 7418019
Author: Jacek Lewandowski <lewandowski.ja...@gmail.com>
Authored: Thu Jul 14 10:18:31 2016 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Thu Jul 14 10:19:01 2016 -0700

----------------------------------------------------------------------
 .../apache/spark/sql/hive/client/HiveClientImpl.scala | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/23e1ab9c/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
----------------------------------------------------------------------
diff --git 
a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala 
b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index 1c89d8c..6cdf3ef 100644
--- 
a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ 
b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -293,7 +293,7 @@ private[hive] class HiveClientImpl(
         database.name,
         database.description,
         database.locationUri,
-        database.properties.asJava),
+        Option(database.properties).map(_.asJava).orNull),
         ignoreIfExists)
   }
 
@@ -311,7 +311,7 @@ private[hive] class HiveClientImpl(
         database.name,
         database.description,
         database.locationUri,
-        database.properties.asJava))
+        Option(database.properties).map(_.asJava).orNull))
   }
 
   override def getDatabaseOption(name: String): Option[CatalogDatabase] = 
withHiveState {
@@ -320,7 +320,7 @@ private[hive] class HiveClientImpl(
         name = d.getName,
         description = d.getDescription,
         locationUri = d.getLocationUri,
-        properties = d.getParameters.asScala.toMap)
+        properties = Option(d.getParameters).map(_.asScala.toMap).orNull)
     }
   }
 
@@ -353,7 +353,7 @@ private[hive] class HiveClientImpl(
         unsupportedFeatures += "bucketing"
       }
 
-      val properties = h.getParameters.asScala.toMap
+      val properties = Option(h.getParameters).map(_.asScala.toMap).orNull
 
       CatalogTable(
         identifier = TableIdentifier(h.getTableName, Option(h.getDbName)),
@@ -390,7 +390,8 @@ private[hive] class HiveClientImpl(
           outputFormat = Option(h.getOutputFormatClass).map(_.getName),
           serde = Option(h.getSerializationLib),
           compressed = h.getTTable.getSd.isCompressed,
-          serdeProperties = 
h.getTTable.getSd.getSerdeInfo.getParameters.asScala.toMap
+          serdeProperties = 
Option(h.getTTable.getSd.getSerdeInfo.getParameters)
+            .map(_.asScala.toMap).orNull
         ),
         properties = properties,
         viewOriginalText = Option(h.getViewOriginalText),
@@ -815,6 +816,7 @@ private[hive] class HiveClientImpl(
         outputFormat = Option(apiPartition.getSd.getOutputFormat),
         serde = Option(apiPartition.getSd.getSerdeInfo.getSerializationLib),
         compressed = apiPartition.getSd.isCompressed,
-        serdeProperties = 
apiPartition.getSd.getSerdeInfo.getParameters.asScala.toMap))
+        serdeProperties = Option(apiPartition.getSd.getSerdeInfo.getParameters)
+          .map(_.asScala.toMap).orNull))
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to