Repository: spark
Updated Branches:
  refs/heads/master 43b15e01c -> b5f8c36e3


[SPARK-14144][SQL] Explicitly identify/catch UnsupportedOperationException during parquet reader initialization

## What changes were proposed in this pull request?

This PR is a minor cleanup task as part of 
https://issues.apache.org/jira/browse/SPARK-14008 to explicitly identify/catch 
the `UnsupportedOperationException` while initializing the vectorized parquet 
reader. Other exceptions will simply be thrown back to `SqlNewHadoopPartition`.
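
To make the narrowed contract concrete, here is a minimal, self-contained Java sketch of the call-site pattern this change enables: only `UnsupportedOperationException` is treated as "this split is not supported, fall back", while `IOException`/`InterruptedException` propagate to the caller. The `FastReader`/`SlowReader` types and the `pickReader` helper below are hypothetical stand-ins for illustration only, not Spark's actual classes (in Spark, propagated exceptions surface back around `SqlNewHadoopPartition`).

```java
import java.io.IOException;

public class ReaderFallbackSketch {

  /** Hypothetical stand-in for the vectorized reader. */
  static class FastReader {
    /** Mirrors tryInitialize: only the unsupported-schema case is swallowed. */
    boolean tryInitialize(String split) throws IOException {
      try {
        initialize(split);
        return true;
      } catch (UnsupportedOperationException e) {
        return false; // schema not supported -> caller should fall back
      }
    }

    void initialize(String split) throws IOException {
      if (split.contains("complex")) {
        throw new UnsupportedOperationException("Complex types not supported.");
      }
      if (split.contains("corrupt")) {
        throw new IOException("cannot read file"); // genuine failure, not swallowed
      }
    }
  }

  /** Hypothetical stand-in for the non-vectorized fallback reader. */
  static class SlowReader {
    void initialize(String split) { /* always succeeds in this sketch */ }
  }

  static String pickReader(String split) throws IOException {
    FastReader fast = new FastReader();
    if (fast.tryInitialize(split)) {
      return "vectorized";
    }
    SlowReader slow = new SlowReader();
    slow.initialize(split);
    return "row-based fallback";
  }

  public static void main(String[] args) throws IOException {
    System.out.println(pickReader("simple-split"));   // -> vectorized
    System.out.println(pickReader("complex-split"));  // -> row-based fallback
    // pickReader("corrupt-split") would throw IOException to the caller
    // rather than silently falling back, which is the point of the narrowed catch.
  }
}
```

Previously, catching `Exception` in `tryInitialize` would have masked genuine I/O failures as a silent fallback; the narrowed catch keeps the fallback limited to schemas the vectorized reader cannot handle.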

## How was this patch tested?

N/A (cleanup only; no new functionality added)

Author: Sameer Agarwal <sam...@databricks.com>

Closes #11950 from sameeragarwal/parquet-cleanup.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b5f8c36e
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b5f8c36e
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b5f8c36e

Branch: refs/heads/master
Commit: b5f8c36e3c93750cea1473019ddd95538eccb4f3
Parents: 43b15e0
Author: Sameer Agarwal <sam...@databricks.com>
Authored: Fri Mar 25 11:48:05 2016 -0700
Committer: Yin Huai <yh...@databricks.com>
Committed: Fri Mar 25 11:48:05 2016 -0700

----------------------------------------------------------------------
 .../parquet/VectorizedParquetRecordReader.java      | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/b5f8c36e/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedParquetRecordReader.java
----------------------------------------------------------------------
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedParquetRecordReader.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedParquetRecordReader.java
index ab09208..c06342c 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedParquetRecordReader.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedParquetRecordReader.java
@@ -104,11 +104,12 @@ public class VectorizedParquetRecordReader extends SpecificParquetRecordReaderBa
   * Tries to initialize the reader for this split. Returns true if this reader supports reading
    * this split and false otherwise.
    */
-  public boolean tryInitialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext) {
+  public boolean tryInitialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
+      throws IOException, InterruptedException {
     try {
       initialize(inputSplit, taskAttemptContext);
       return true;
-    } catch (Exception e) {
+    } catch (UnsupportedOperationException e) {
       return false;
     }
   }
@@ -118,7 +119,7 @@ public class VectorizedParquetRecordReader extends SpecificParquetRecordReaderBa
    */
   @Override
   public void initialize(InputSplit inputSplit, TaskAttemptContext taskAttemptContext)
-      throws IOException, InterruptedException {
+      throws IOException, InterruptedException, UnsupportedOperationException {
     super.initialize(inputSplit, taskAttemptContext);
     initializeInternal();
   }
@@ -128,7 +129,8 @@ public class VectorizedParquetRecordReader extends SpecificParquetRecordReaderBa
   * objects to use this class. `columns` can contain the list of columns to project.
    */
   @Override
-  public void initialize(String path, List<String> columns) throws IOException {
+  public void initialize(String path, List<String> columns) throws IOException,
+      UnsupportedOperationException {
     super.initialize(path, columns);
     initializeInternal();
   }
@@ -248,7 +250,7 @@ public class VectorizedParquetRecordReader extends SpecificParquetRecordReaderBa
     return true;
   }
 
-  private void initializeInternal() throws IOException {
+  private void initializeInternal() throws IOException, UnsupportedOperationException {
     /**
      * Check that the requested schema is supported.
      */
@@ -256,14 +258,14 @@ public class VectorizedParquetRecordReader extends SpecificParquetRecordReaderBa
     for (int i = 0; i < requestedSchema.getFieldCount(); ++i) {
       Type t = requestedSchema.getFields().get(i);
       if (!t.isPrimitive() || t.isRepetition(Type.Repetition.REPEATED)) {
-        throw new IOException("Complex types not supported.");
+        throw new UnsupportedOperationException("Complex types not supported.");
       }
 
       String[] colPath = requestedSchema.getPaths().get(i);
       if (fileSchema.containsPath(colPath)) {
         ColumnDescriptor fd = fileSchema.getColumnDescription(colPath);
         if (!fd.equals(requestedSchema.getColumns().get(i))) {
-          throw new IOException("Schema evolution not supported.");
+          throw new UnsupportedOperationException("Schema evolution not supported.");
         }
         missingColumns[i] = false;
       } else {

