This is an automated email from the ASF dual-hosted git repository.

abstractdog pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new c66ed72  HIVE-21789: HiveFileFormatUtils.getRecordWriter is unnecessary (Ivan Suller via Laszlo Bodor)
c66ed72 is described below

commit c66ed72faa42d7a58d2c4fb5b0019ace0962c456
Author: Ivan Suller <isul...@cloudera.com>
AuthorDate: Tue Jun 4 14:36:22 2019 +0200

    HIVE-21789: HiveFileFormatUtils.getRecordWriter is unnecessary (Ivan Suller via Laszlo Bodor)
    
    Signed-off-by: Laszlo Bodor <bodorlaszlo0...@gmail.com>
---
 .../java/org/apache/hadoop/hive/ql/exec/Utilities.java |  5 ++---
 .../hadoop/hive/ql/exec/persistence/RowContainer.java  |  5 ++---
 .../apache/hadoop/hive/ql/io/HiveFileFormatUtils.java  | 18 +++---------------
 3 files changed, 7 insertions(+), 21 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
index b37558c..d91cd60 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
@@ -1617,9 +1617,8 @@ public final class Utilities {
 
     for (Path path : paths) {
       Utilities.FILE_OP_LOGGER.trace("creating empty bucket for {}", path);
-      RecordWriter writer = HiveFileFormatUtils.getRecordWriter(
-          jc, hiveOutputFormat, outputClass, isCompressed,
-          tableInfo.getProperties(), path, reporter);
+      RecordWriter writer = hiveOutputFormat.getHiveRecordWriter(jc, path, outputClass, isCompressed,
+          tableInfo.getProperties(), reporter);
       writer.close(false);
       LOG.info("created empty bucket for enforcing bucketing at {}", path);
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
index 901d544..4a47ca0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
@@ -541,9 +541,8 @@ public class RowContainer<ROW extends List<Object>>
       HiveOutputFormat<?, ?> hiveOutputFormat = HiveFileFormatUtils.getHiveOutputFormat(jc, tblDesc);
       tempOutPath = new Path(tmpFile.toString());
       JobConf localJc = getLocalFSJobConfClone(jc);
-      rw = HiveFileFormatUtils.getRecordWriter(this.jobCloneUsingLocalFs,
-          hiveOutputFormat, serde.getSerializedClass(), false,
-          tblDesc.getProperties(), tempOutPath, reporter);
+      rw = hiveOutputFormat.getHiveRecordWriter(localJc, tempOutPath, serde.getSerializedClass(),
+          false, tblDesc.getProperties(), reporter);
     } catch (Exception e) {
       clearRows();
       LOG.error(e.toString(), e);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
index 33a6101..f76b026 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/HiveFileFormatUtils.java
@@ -274,7 +274,7 @@ public final class HiveFileFormatUtils {
         String codecStr = conf.getCompressCodec();
         if (codecStr != null && !codecStr.trim().equals("")) {
          Class<? extends CompressionCodec> codec =
-              (Class<? extends CompressionCodec>) JavaUtils.loadClass(codecStr);
+              JavaUtils.loadClass(codecStr);
           FileOutputFormat.setOutputCompressorClass(jc_output, codec);
         }
         String type = conf.getCompressType();
@@ -283,25 +283,13 @@ public final class HiveFileFormatUtils {
           SequenceFileOutputFormat.setOutputCompressionType(jc, style);
         }
       }
-      return getRecordWriter(jc_output, hiveOutputFormat, outputClass,
-          isCompressed, tableInfo.getProperties(), outPath, reporter);
+      return hiveOutputFormat.getHiveRecordWriter(jc_output, outPath, outputClass, isCompressed,
+          tableInfo.getProperties(), reporter);
     } catch (Exception e) {
       throw new HiveException(e);
     }
   }
 
-  public static RecordWriter getRecordWriter(JobConf jc,
-      OutputFormat<?, ?> outputFormat,
-      Class<? extends Writable> valueClass, boolean isCompressed,
-      Properties tableProp, Path outPath, Reporter reporter
-      ) throws IOException, HiveException {
-    if (!(outputFormat instanceof HiveOutputFormat)) {
-      outputFormat = new HivePassThroughOutputFormat(outputFormat);
-    }
-    return ((HiveOutputFormat)outputFormat).getHiveRecordWriter(
-        jc, outPath, valueClass, isCompressed, tableProp, reporter);
-  }
-
   public static HiveOutputFormat<?, ?> getHiveOutputFormat(Configuration conf, TableDesc tableDesc)
       throws HiveException {
     return getHiveOutputFormat(conf, tableDesc.getOutputFileFormatClass());

Reply via email to