Repository: spark
Updated Branches:
  refs/heads/master c576f9fb9 -> 31ca741ae


[SPARK-16528][SQL] Fix NPE problem in HiveClientImpl

## What changes were proposed in this pull request?

There are some calls to methods or fields (getParameters, properties) whose 
results are then passed to Java/Scala collection converters. Unfortunately those 
fields can be null in some cases, and the conversion then throws an NPE. We fix 
it by wrapping calls to those fields and methods in `Option` before performing 
the conversion.

## How was this patch tested?

Manually tested with a custom Hive metastore.

Author: Jacek Lewandowski <lewandowski.ja...@gmail.com>

Closes #14200 from jacek-lewandowski/SPARK-16528.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/31ca741a
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/31ca741a
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/31ca741a

Branch: refs/heads/master
Commit: 31ca741aef9dd138529e064785c8e58b86140ff5
Parents: c576f9f
Author: Jacek Lewandowski <lewandowski.ja...@gmail.com>
Authored: Thu Jul 14 10:18:31 2016 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Thu Jul 14 10:18:31 2016 -0700

----------------------------------------------------------------------
 .../apache/spark/sql/hive/client/HiveClientImpl.scala | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/31ca741a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
----------------------------------------------------------------------
diff --git 
a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala 
b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
index 7e0cef3..2f102a8 100644
--- 
a/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
+++ 
b/sql/hive/src/main/scala/org/apache/spark/sql/hive/client/HiveClientImpl.scala
@@ -293,7 +293,7 @@ private[hive] class HiveClientImpl(
         database.name,
         database.description,
         database.locationUri,
-        database.properties.asJava),
+        Option(database.properties).map(_.asJava).orNull),
         ignoreIfExists)
   }
 
@@ -311,7 +311,7 @@ private[hive] class HiveClientImpl(
         database.name,
         database.description,
         database.locationUri,
-        database.properties.asJava))
+        Option(database.properties).map(_.asJava).orNull))
   }
 
   override def getDatabaseOption(name: String): Option[CatalogDatabase] = 
withHiveState {
@@ -320,7 +320,7 @@ private[hive] class HiveClientImpl(
         name = d.getName,
         description = d.getDescription,
         locationUri = d.getLocationUri,
-        properties = d.getParameters.asScala.toMap)
+        properties = Option(d.getParameters).map(_.asScala.toMap).orNull)
     }
   }
 
@@ -353,7 +353,7 @@ private[hive] class HiveClientImpl(
         unsupportedFeatures += "bucketing"
       }
 
-      val properties = h.getParameters.asScala.toMap
+      val properties = Option(h.getParameters).map(_.asScala.toMap).orNull
 
       CatalogTable(
         identifier = TableIdentifier(h.getTableName, Option(h.getDbName)),
@@ -390,7 +390,8 @@ private[hive] class HiveClientImpl(
           outputFormat = Option(h.getOutputFormatClass).map(_.getName),
           serde = Option(h.getSerializationLib),
           compressed = h.getTTable.getSd.isCompressed,
-          serdeProperties = 
h.getTTable.getSd.getSerdeInfo.getParameters.asScala.toMap
+          serdeProperties = 
Option(h.getTTable.getSd.getSerdeInfo.getParameters)
+            .map(_.asScala.toMap).orNull
         ),
         properties = properties,
         viewOriginalText = Option(h.getViewOriginalText),
@@ -817,6 +818,7 @@ private[hive] class HiveClientImpl(
         outputFormat = Option(apiPartition.getSd.getOutputFormat),
         serde = Option(apiPartition.getSd.getSerdeInfo.getSerializationLib),
         compressed = apiPartition.getSd.isCompressed,
-        serdeProperties = 
apiPartition.getSd.getSerdeInfo.getParameters.asScala.toMap))
+        serdeProperties = Option(apiPartition.getSd.getSerdeInfo.getParameters)
+          .map(_.asScala.toMap).orNull))
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to