Repository: spark Updated Branches: refs/heads/master e75e04f98 -> 69b6fed20
[SPARK-4663][sql]add finally to avoid resource leak

Author: baishuo <vc_j...@hotmail.com>

Closes #3526 from baishuo/master-trycatch and squashes the following commits:

d446e14 [baishuo] correct the code style
b36bf96 [baishuo] correct the code style
ae0e447 [baishuo] add finally to avoid resource leak

Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/69b6fed2
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/69b6fed2
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/69b6fed2

Branch: refs/heads/master
Commit: 69b6fed206565ecb0173d3757bcb5110422887c3
Parents: e75e04f
Author: baishuo <vc_j...@hotmail.com>
Authored: Tue Dec 2 12:12:03 2014 -0800
Committer: Michael Armbrust <mich...@databricks.com>
Committed: Tue Dec 2 12:12:03 2014 -0800

----------------------------------------------------------------------
 .../spark/sql/parquet/ParquetTableOperations.scala | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/69b6fed2/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableOperations.scala
----------------------------------------------------------------------
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableOperations.scala b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableOperations.scala
index 0e36852..232ef90 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableOperations.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/parquet/ParquetTableOperations.scala
@@ -302,11 +302,14 @@ case class InsertIntoParquetTable(
       val committer = format.getOutputCommitter(hadoopContext)
       committer.setupTask(hadoopContext)
       val writer = format.getRecordWriter(hadoopContext)
-      while (iter.hasNext) {
-        val row = iter.next()
-        writer.write(null, row)
+      try {
+        while (iter.hasNext) {
+          val row = iter.next()
+          writer.write(null, row)
+        }
+      } finally {
+        writer.close(hadoopContext)
       }
-      writer.close(hadoopContext)
       committer.commitTask(hadoopContext)
       1
     }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org