Repository: spark
Updated Branches:
  refs/heads/master b17057c0a -> 16293311c


[SPARK-18237][HIVE] hive.exec.stagingdir has no effect

hive.exec.stagingdir has no effect in Spark 2.0.1. Hive configs in
hive-site.xml are loaded into `hadoopConf`, so `InsertIntoHiveTable` should
read `hive.exec.stagingdir` from `hadoopConf` instead of `SessionState.conf`.
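
Below is a minimal, hypothetical sketch (not part of this patch) of how the
two configuration sources differ from the user side. It assumes a
Hive-enabled SparkSession and a hive-site.xml that overrides
hive.exec.stagingdir; the object and value names are illustrative only.

```scala
import org.apache.spark.sql.SparkSession

// Sketch: compare the Hadoop configuration (which layers in hive-site.xml
// when Hive support is enabled) with Spark's own runtime conf for the key.
object StagingDirCheck {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("staging-dir-check")
      .enableHiveSupport()
      .getOrCreate()

    // Force Hive session state (and hive-site.xml loading) to initialize.
    spark.sql("SHOW DATABASES").collect()

    // A value set in hive-site.xml is visible through the Hadoop conf ...
    val fromHadoopConf = spark.sparkContext.hadoopConfiguration
      .get("hive.exec.stagingdir", ".hive-staging")

    // ... but not through Spark's conf unless it was also passed to Spark
    // itself (--conf or spark-defaults.conf), which is why the old code fell
    // back to the ".hive-staging" default even when hive-site.xml set it.
    val fromSparkConf = spark.conf.getOption("hive.exec.stagingdir")
      .getOrElse(".hive-staging")

    println(s"hadoopConf: $fromHadoopConf")
    println(s"spark.conf: $fromSparkConf")

    spark.stop()
  }
}
```

With the patch, `InsertIntoHiveTable` obtains the value through
`sessionState.newHadoopConf()`, so a staging directory configured in
hive-site.xml is respected.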

Author: 福星 <fux...@wacai.com>

Closes #15744 from ClassNotFoundExp/master.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/16293311
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/16293311
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/16293311

Branch: refs/heads/master
Commit: 16293311cdb25a62733a9aae4355659b971a3ce1
Parents: b17057c
Author: 福星 <fux...@wacai.com>
Authored: Thu Nov 3 12:02:01 2016 -0700
Committer: Reynold Xin <r...@databricks.com>
Committed: Thu Nov 3 12:02:01 2016 -0700

----------------------------------------------------------------------
 .../apache/spark/sql/hive/execution/InsertIntoHiveTable.scala    | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/16293311/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
----------------------------------------------------------------------
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
index 15be12c..e333fc7 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/InsertIntoHiveTable.scala
@@ -76,7 +76,8 @@ case class InsertIntoHiveTable(
 
   def output: Seq[Attribute] = Seq.empty
 
-  val stagingDir = sessionState.conf.getConfString("hive.exec.stagingdir", ".hive-staging")
+  val hadoopConf = sessionState.newHadoopConf()
+  val stagingDir = hadoopConf.get("hive.exec.stagingdir", ".hive-staging")
 
   private def executionId: String = {
     val rand: Random = new Random
@@ -163,7 +164,6 @@ case class InsertIntoHiveTable(
     // instances within the closure, since Serializer is not serializable while TableDesc is.
     val tableDesc = table.tableDesc
     val tableLocation = table.hiveQlTable.getDataLocation
-    val hadoopConf = sessionState.newHadoopConf()
     val tmpLocation = getExternalTmpPath(tableLocation, hadoopConf)
     val fileSinkConf = new FileSinkDesc(tmpLocation.toString, tableDesc, false)
     val isCompressed = hadoopConf.get("hive.exec.compress.output", "false").toBoolean

