hive git commit: HIVE-11523: org.apache.hadoop.hive.ql.io.orc.FileDump should handle errors (Prasanth Jayachandran reviewed by Ashutosh Chauhan)

2015-10-26 Thread prasanthj
Repository: hive
Updated Branches:
  refs/heads/master a91e1471c -> d84e393ed


HIVE-11523: org.apache.hadoop.hive.ql.io.orc.FileDump should handle errors 
(Prasanth Jayachandran reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d84e393e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d84e393e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d84e393e

Branch: refs/heads/master
Commit: d84e393ed66829fe0c8cc87254fef2a329b96163
Parents: a91e147
Author: Prasanth Jayachandran 
Authored: Mon Oct 26 14:18:36 2015 -0500
Committer: Prasanth Jayachandran 
Committed: Mon Oct 26 14:18:36 2015 -0500

--
 .../apache/hadoop/hive/ql/io/orc/FileDump.java  | 193 +
 .../hadoop/hive/ql/io/orc/JsonFileDump.java | 210 ++-
 .../hadoop/hive/ql/io/orc/TestFileDump.java |  50 -
 3 files changed, 213 insertions(+), 240 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d84e393e/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
index a1c5058..9c6538f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
@@ -136,9 +136,16 @@ public final class FileDump {
   private static void printData(List<String> files, Configuration conf) throws 
IOException,
   JSONException {
 for (String file : files) {
-  printJsonData(conf, file);
-  if (files.size() > 1) {
-System.out.println(Strings.repeat("=", 80) + "\n");
+  try {
+printJsonData(conf, file);
+if (files.size() > 1) {
+  System.out.println(Strings.repeat("=", 80) + "\n");
+}
+  } catch (Exception e) {
+System.err.println("Unable to dump data for file: " + file);
+e.printStackTrace();
+System.err.println(Strings.repeat("=", 80) + "\n");
+continue;
   }
 }
   }
@@ -146,103 +153,111 @@ public final class FileDump {
   private static void printMetaData(List<String> files, Configuration conf,
   List<String> rowIndexCols, boolean printTimeZone) throws IOException {
 for (String filename : files) {
-  System.out.println("Structure for " + filename);
-  Path path = new Path(filename);
-  Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(conf));
-  System.out.println("File Version: " + reader.getFileVersion().getName() +
-  " with " + reader.getWriterVersion());
-  RecordReaderImpl rows = (RecordReaderImpl) reader.rows();
-  System.out.println("Rows: " + reader.getNumberOfRows());
-  System.out.println("Compression: " + reader.getCompression());
-  if (reader.getCompression() != CompressionKind.NONE) {
-System.out.println("Compression size: " + reader.getCompressionSize());
-  }
-  System.out.println("Type: " + reader.getObjectInspector().getTypeName());
-  System.out.println("\nStripe Statistics:");
-  List<StripeStatistics> stripeStats = reader.getStripeStatistics();
-  for (int n = 0; n < stripeStats.size(); n++) {
-System.out.println("  Stripe " + (n + 1) + ":");
-StripeStatistics ss = stripeStats.get(n);
-for (int i = 0; i < ss.getColumnStatistics().length; ++i) {
-  System.out.println("Column " + i + ": " +
-  ss.getColumnStatistics()[i].toString());
+  try {
+Path path = new Path(filename);
+Reader reader = OrcFile.createReader(path, 
OrcFile.readerOptions(conf));
+System.out.println("Structure for " + filename);
+System.out.println("File Version: " + 
reader.getFileVersion().getName() +
+" with " + reader.getWriterVersion());
+RecordReaderImpl rows = (RecordReaderImpl) reader.rows();
+System.out.println("Rows: " + reader.getNumberOfRows());
+System.out.println("Compression: " + reader.getCompression());
+if (reader.getCompression() != CompressionKind.NONE) {
+  System.out.println("Compression size: " + 
reader.getCompressionSize());
 }
-  }
-  ColumnStatistics[] stats = reader.getStatistics();
-  int colCount = stats.length;
-  System.out.println("\nFile Statistics:");
-  for (int i = 0; i < stats.length; ++i) {
-System.out.println("  Column " + i + ": " + stats[i].toString());
-  }
-  System.out.println("\nStripes:");
-  int stripeIx = -1;
-  for (StripeInformation stripe : reader.getStripes()) {
-++stripeIx;
-long stripeStart = stripe.getOffset();
-OrcProto.StripeFooter footer = rows.readStripeFooter(stripe);
-if (printTimeZone) 

hive git commit: HIVE-11523: org.apache.hadoop.hive.ql.io.orc.FileDump should handle errors (Prasanth Jayachandran reviewed by Ashutosh Chauhan)

2015-10-26 Thread prasanthj
Repository: hive
Updated Branches:
  refs/heads/branch-1 eb582ed84 -> 81d732889


HIVE-11523: org.apache.hadoop.hive.ql.io.orc.FileDump should handle errors 
(Prasanth Jayachandran reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/81d73288
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/81d73288
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/81d73288

Branch: refs/heads/branch-1
Commit: 81d732889dbc13597abab5409418ca28e69ade10
Parents: eb582ed
Author: Prasanth Jayachandran 
Authored: Mon Oct 26 14:20:53 2015 -0500
Committer: Prasanth Jayachandran 
Committed: Mon Oct 26 14:20:53 2015 -0500

--
 .../apache/hadoop/hive/ql/io/orc/FileDump.java  | 192 +
 .../hadoop/hive/ql/io/orc/JsonFileDump.java | 209 ++-
 .../hadoop/hive/ql/io/orc/TestFileDump.java |  50 -
 3 files changed, 211 insertions(+), 240 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/81d73288/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
index 76ecb33..fa1543b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
@@ -136,9 +136,16 @@ public final class FileDump {
   private static void printData(List<String> files, Configuration conf) throws 
IOException,
   JSONException {
 for (String file : files) {
-  printJsonData(conf, file);
-  if (files.size() > 1) {
-System.out.println(Strings.repeat("=", 80) + "\n");
+  try {
+printJsonData(conf, file);
+if (files.size() > 1) {
+  System.out.println(Strings.repeat("=", 80) + "\n");
+}
+  } catch (Exception e) {
+System.err.println("Unable to dump data for file: " + file);
+e.printStackTrace();
+System.err.println(Strings.repeat("=", 80) + "\n");
+continue;
   }
 }
   }
@@ -146,103 +153,110 @@ public final class FileDump {
   private static void printMetaData(List<String> files, Configuration conf,
   List<String> rowIndexCols, boolean printTimeZone) throws IOException {
 for (String filename : files) {
-  System.out.println("Structure for " + filename);
-  Path path = new Path(filename);
-  Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(conf));
-  System.out.println("File Version: " + reader.getFileVersion().getName() +
-  " with " + reader.getWriterVersion());
-  RecordReaderImpl rows = (RecordReaderImpl) reader.rows();
-  System.out.println("Rows: " + reader.getNumberOfRows());
-  System.out.println("Compression: " + reader.getCompression());
-  if (reader.getCompression() != CompressionKind.NONE) {
-System.out.println("Compression size: " + reader.getCompressionSize());
-  }
-  System.out.println("Type: " + reader.getObjectInspector().getTypeName());
-  System.out.println("\nStripe Statistics:");
-  Metadata metadata = reader.getMetadata();
-  for (int n = 0; n < metadata.getStripeStatistics().size(); n++) {
-System.out.println("  Stripe " + (n + 1) + ":");
-StripeStatistics ss = metadata.getStripeStatistics().get(n);
-for (int i = 0; i < ss.getColumnStatistics().length; ++i) {
-  System.out.println("Column " + i + ": " +
-  ss.getColumnStatistics()[i].toString());
+  try {
+Path path = new Path(filename);
+Reader reader = OrcFile.createReader(path, 
OrcFile.readerOptions(conf));
+System.out.println("Structure for " + filename);
+System.out.println("File Version: " + 
reader.getFileVersion().getName() +
+" with " + reader.getWriterVersion());
+RecordReaderImpl rows = (RecordReaderImpl) reader.rows();
+System.out.println("Rows: " + reader.getNumberOfRows());
+System.out.println("Compression: " + reader.getCompression());
+if (reader.getCompression() != CompressionKind.NONE) {
+  System.out.println("Compression size: " + 
reader.getCompressionSize());
 }
-  }
-  ColumnStatistics[] stats = reader.getStatistics();
-  int colCount = stats.length;
-  System.out.println("\nFile Statistics:");
-  for (int i = 0; i < stats.length; ++i) {
-System.out.println("  Column " + i + ": " + stats[i].toString());
-  }
-  System.out.println("\nStripes:");
-  int stripeIx = -1;
-  for (StripeInformation stripe : reader.getStripes()) {
-++stripeIx;
-long stripeStart = stripe.getOffset();
-OrcProto.StripeFooter footer = rows.readStripeFooter(st

[40/55] [abbrv] hive git commit: HIVE-11523: org.apache.hadoop.hive.ql.io.orc.FileDump should handle errors (Prasanth Jayachandran reviewed by Ashutosh Chauhan)

2015-10-28 Thread xuefu
HIVE-11523: org.apache.hadoop.hive.ql.io.orc.FileDump should handle errors 
(Prasanth Jayachandran reviewed by Ashutosh Chauhan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/d84e393e
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/d84e393e
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/d84e393e

Branch: refs/heads/spark
Commit: d84e393ed66829fe0c8cc87254fef2a329b96163
Parents: a91e147
Author: Prasanth Jayachandran 
Authored: Mon Oct 26 14:18:36 2015 -0500
Committer: Prasanth Jayachandran 
Committed: Mon Oct 26 14:18:36 2015 -0500

--
 .../apache/hadoop/hive/ql/io/orc/FileDump.java  | 193 +
 .../hadoop/hive/ql/io/orc/JsonFileDump.java | 210 ++-
 .../hadoop/hive/ql/io/orc/TestFileDump.java |  50 -
 3 files changed, 213 insertions(+), 240 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/hive/blob/d84e393e/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
--
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java 
b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
index a1c5058..9c6538f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/FileDump.java
@@ -136,9 +136,16 @@ public final class FileDump {
   private static void printData(List<String> files, Configuration conf) throws 
IOException,
   JSONException {
 for (String file : files) {
-  printJsonData(conf, file);
-  if (files.size() > 1) {
-System.out.println(Strings.repeat("=", 80) + "\n");
+  try {
+printJsonData(conf, file);
+if (files.size() > 1) {
+  System.out.println(Strings.repeat("=", 80) + "\n");
+}
+  } catch (Exception e) {
+System.err.println("Unable to dump data for file: " + file);
+e.printStackTrace();
+System.err.println(Strings.repeat("=", 80) + "\n");
+continue;
   }
 }
   }
@@ -146,103 +153,111 @@ public final class FileDump {
   private static void printMetaData(List<String> files, Configuration conf,
   List<String> rowIndexCols, boolean printTimeZone) throws IOException {
 for (String filename : files) {
-  System.out.println("Structure for " + filename);
-  Path path = new Path(filename);
-  Reader reader = OrcFile.createReader(path, OrcFile.readerOptions(conf));
-  System.out.println("File Version: " + reader.getFileVersion().getName() +
-  " with " + reader.getWriterVersion());
-  RecordReaderImpl rows = (RecordReaderImpl) reader.rows();
-  System.out.println("Rows: " + reader.getNumberOfRows());
-  System.out.println("Compression: " + reader.getCompression());
-  if (reader.getCompression() != CompressionKind.NONE) {
-System.out.println("Compression size: " + reader.getCompressionSize());
-  }
-  System.out.println("Type: " + reader.getObjectInspector().getTypeName());
-  System.out.println("\nStripe Statistics:");
-  List<StripeStatistics> stripeStats = reader.getStripeStatistics();
-  for (int n = 0; n < stripeStats.size(); n++) {
-System.out.println("  Stripe " + (n + 1) + ":");
-StripeStatistics ss = stripeStats.get(n);
-for (int i = 0; i < ss.getColumnStatistics().length; ++i) {
-  System.out.println("Column " + i + ": " +
-  ss.getColumnStatistics()[i].toString());
+  try {
+Path path = new Path(filename);
+Reader reader = OrcFile.createReader(path, 
OrcFile.readerOptions(conf));
+System.out.println("Structure for " + filename);
+System.out.println("File Version: " + 
reader.getFileVersion().getName() +
+" with " + reader.getWriterVersion());
+RecordReaderImpl rows = (RecordReaderImpl) reader.rows();
+System.out.println("Rows: " + reader.getNumberOfRows());
+System.out.println("Compression: " + reader.getCompression());
+if (reader.getCompression() != CompressionKind.NONE) {
+  System.out.println("Compression size: " + 
reader.getCompressionSize());
 }
-  }
-  ColumnStatistics[] stats = reader.getStatistics();
-  int colCount = stats.length;
-  System.out.println("\nFile Statistics:");
-  for (int i = 0; i < stats.length; ++i) {
-System.out.println("  Column " + i + ": " + stats[i].toString());
-  }
-  System.out.println("\nStripes:");
-  int stripeIx = -1;
-  for (StripeInformation stripe : reader.getStripes()) {
-++stripeIx;
-long stripeStart = stripe.getOffset();
-OrcProto.StripeFooter footer = rows.readStripeFooter(stripe);
-if (printTimeZone) {
-  String tz = footer.getWriterTimezone();
-  if (tz == null ||