This is an automated email from the ASF dual-hosted git repository.

jcamacho pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 0280984  HIVE-22891: Skip PartitionDesc Extraction In 
CombineHiveRecordReader For Non-LLAP Execution Mode (Syed Shameerur Rahman, reviewed 
by Ádám Szita)
0280984 is described below

commit 0280984aefca30c1dc8a34f30172124231e7024d
Author: Syed Shameerur Rahman <srah...@qubole.com>
AuthorDate: Tue Feb 25 15:05:38 2020 -0800

    HIVE-22891: Skip PartitionDesc Extraction In CombineHiveRecordReader For Non-LLAP 
Execution Mode (Syed Shameerur Rahman, reviewed by Ádám Szita)
    
    Close apache/hive#914
---
 .../hadoop/hive/ql/io/CombineHiveRecordReader.java | 26 ++++++++++------------
 .../apache/hadoop/hive/ql/io/HiveInputFormat.java  | 10 +++++----
 2 files changed, 18 insertions(+), 18 deletions(-)

diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
index 0d2eb0a..f10460df 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/CombineHiveRecordReader.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.llap.io.api.LlapProxy;
 import org.apache.hadoop.hive.ql.exec.Utilities;
 import org.apache.hadoop.hive.ql.exec.mr.ExecMapper;
 import 
org.apache.hadoop.hive.ql.io.CombineHiveInputFormat.CombineHiveInputSplit;
@@ -69,25 +70,22 @@ public class CombineHiveRecordReader<K extends 
WritableComparable, V extends Wri
           + inputFormatClassName);
     }
     InputFormat inputFormat = 
HiveInputFormat.getInputFormatFromCache(inputFormatClass, jobConf);
-    try {
-      // TODO: refactor this out
-      if (pathToPartInfo == null) {
-        MapWork mrwork;
-        if (HiveConf.getVar(conf, 
HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")) {
-          mrwork = (MapWork) Utilities.getMergeWork(jobConf);
+    if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.LLAP_IO_ENABLED, 
LlapProxy.isDaemon())) {
+      try {
+        // TODO : refactor this out
+        if (pathToPartInfo == null) {
+          MapWork mrwork = (MapWork) Utilities.getMergeWork(jobConf);
           if (mrwork == null) {
             mrwork = Utilities.getMapWork(jobConf);
           }
-        } else {
-          mrwork = Utilities.getMapWork(jobConf);
+          pathToPartInfo = mrwork.getPathToPartitionInfo();
         }
-        pathToPartInfo = mrwork.getPathToPartitionInfo();
-      }
 
-      PartitionDesc part = extractSinglePartSpec(hsplit);
-      inputFormat = HiveInputFormat.wrapForLlap(inputFormat, jobConf, part);
-    } catch (HiveException e) {
-      throw new IOException(e);
+        PartitionDesc part = extractSinglePartSpec(hsplit);
+        inputFormat = HiveInputFormat.wrapForLlap(inputFormat, jobConf, part);
+      } catch (HiveException e) {
+        throw new IOException(e);
+      }
     }
 
     // create a split for the given partition
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
index c97c961..233bd1e 100755
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
@@ -406,10 +406,12 @@ public class HiveInputFormat<K extends 
WritableComparable, V extends Writable>
     pushProjectionsAndFilters(job, inputFormatClass, splitPath, nonNative);
 
     InputFormat inputFormat = getInputFormatFromCache(inputFormatClass, job);
-    try {
-      inputFormat = HiveInputFormat.wrapForLlap(inputFormat, job, part);
-    } catch (HiveException e) {
-      throw new IOException(e);
+    if (HiveConf.getBoolVar(job, ConfVars.LLAP_IO_ENABLED, 
LlapProxy.isDaemon())) {
+      try {
+        inputFormat = HiveInputFormat.wrapForLlap(inputFormat, job, part);
+      } catch (HiveException e) {
+        throw new IOException(e);
+      }
     }
     RecordReader innerReader = null;
     try {

Reply via email to