spark git commit: [MINOR][BUILD] Fix Java linter errors
Repository: spark Updated Branches: refs/heads/branch-2.3 6152da389 -> db27a9365 [MINOR][BUILD] Fix Java linter errors ## What changes were proposed in this pull request? This PR cleans up the java-lint errors (for v2.3.0-rc1 tag). Hopefully, this will be the final one. ``` $ dev/lint-java Using `mvn` from path: /usr/local/bin/mvn Checkstyle checks failed at following occurrences: [ERROR] src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java:[85] (sizes) LineLength: Line is longer than 100 characters (found 101). [ERROR] src/main/java/org/apache/spark/launcher/InProcessAppHandle.java:[20,8] (imports) UnusedImports: Unused import - java.io.IOException. [ERROR] src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVector.java:[41,9] (modifier) ModifierOrder: 'private' modifier out of order with the JLS suggestions. [ERROR] src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java:[464] (sizes) LineLength: Line is longer than 100 characters (found 102). ``` ## How was this patch tested? Manual. ``` $ dev/lint-java Using `mvn` from path: /usr/local/bin/mvn Checkstyle checks passed. ``` Author: Dongjoon Hyun. Closes #20242 from dongjoon-hyun/fix_lint_java_2.3_rc1.
(cherry picked from commit 7bd14cfd40500a0b6462cda647bdbb686a430328) Signed-off-by: Sameer Agarwal Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/db27a936 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/db27a936 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/db27a936 Branch: refs/heads/branch-2.3 Commit: db27a93652780f234f3c5fe750ef07bc5525d177 Parents: 6152da3 Author: Dongjoon Hyun Authored: Fri Jan 12 10:18:42 2018 -0800 Committer: Sameer Agarwal Committed: Fri Jan 12 10:18:59 2018 -0800 -- .../java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java | 3 ++- .../main/java/org/apache/spark/launcher/InProcessAppHandle.java | 1 - .../spark/sql/execution/datasources/orc/OrcColumnVector.java | 2 +- .../test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java | 3 ++- 4 files changed, 5 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/db27a936/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java -- diff --git a/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java b/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java index 3acfe36..a9603c1 100644 --- a/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java +++ b/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java @@ -82,7 +82,8 @@ public class HeapMemoryAllocator implements MemoryAllocator { "page has already been freed"; assert ((memory.pageNumber == MemoryBlock.NO_PAGE_NUMBER) || (memory.pageNumber == MemoryBlock.FREED_IN_TMM_PAGE_NUMBER)) : - "TMM-allocated pages must first be freed via TMM.freePage(), not directly in allocator free()"; + "TMM-allocated pages must first be freed via TMM.freePage(), not directly in allocator " + +"free()"; final long size = memory.size(); if (MemoryAllocator.MEMORY_DEBUG_FILL_ENABLED) { 
http://git-wip-us.apache.org/repos/asf/spark/blob/db27a936/launcher/src/main/java/org/apache/spark/launcher/InProcessAppHandle.java -- diff --git a/launcher/src/main/java/org/apache/spark/launcher/InProcessAppHandle.java b/launcher/src/main/java/org/apache/spark/launcher/InProcessAppHandle.java index 0d6a73a..acd64c9 100644 --- a/launcher/src/main/java/org/apache/spark/launcher/InProcessAppHandle.java +++ b/launcher/src/main/java/org/apache/spark/launcher/InProcessAppHandle.java @@ -17,7 +17,6 @@ package org.apache.spark.launcher; -import java.io.IOException; import java.lang.reflect.Method; import java.util.concurrent.atomic.AtomicLong; import java.util.logging.Level; http://git-wip-us.apache.org/repos/asf/spark/blob/db27a936/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVector.java -- diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVector.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVector.java index f94c55d..b6e7922 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVector.java +++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVector.java @@
spark git commit: [MINOR][BUILD] Fix Java linter errors
Repository: spark Updated Branches: refs/heads/master 651f76153 -> 7bd14cfd4 [MINOR][BUILD] Fix Java linter errors ## What changes were proposed in this pull request? This PR cleans up the java-lint errors (for v2.3.0-rc1 tag). Hopefully, this will be the final one. ``` $ dev/lint-java Using `mvn` from path: /usr/local/bin/mvn Checkstyle checks failed at following occurrences: [ERROR] src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java:[85] (sizes) LineLength: Line is longer than 100 characters (found 101). [ERROR] src/main/java/org/apache/spark/launcher/InProcessAppHandle.java:[20,8] (imports) UnusedImports: Unused import - java.io.IOException. [ERROR] src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVector.java:[41,9] (modifier) ModifierOrder: 'private' modifier out of order with the JLS suggestions. [ERROR] src/test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java:[464] (sizes) LineLength: Line is longer than 100 characters (found 102). ``` ## How was this patch tested? Manual. ``` $ dev/lint-java Using `mvn` from path: /usr/local/bin/mvn Checkstyle checks passed. ``` Author: Dongjoon Hyun. Closes #20242 from dongjoon-hyun/fix_lint_java_2.3_rc1.
Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/7bd14cfd Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/7bd14cfd Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/7bd14cfd Branch: refs/heads/master Commit: 7bd14cfd40500a0b6462cda647bdbb686a430328 Parents: 651f761 Author: Dongjoon Hyun Authored: Fri Jan 12 10:18:42 2018 -0800 Committer: Sameer Agarwal Committed: Fri Jan 12 10:18:42 2018 -0800 -- .../java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java | 3 ++- .../main/java/org/apache/spark/launcher/InProcessAppHandle.java | 1 - .../spark/sql/execution/datasources/orc/OrcColumnVector.java | 2 +- .../test/java/test/org/apache/spark/sql/JavaDataFrameSuite.java | 3 ++- 4 files changed, 5 insertions(+), 4 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/7bd14cfd/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java -- diff --git a/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java b/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java index 3acfe36..a9603c1 100644 --- a/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java +++ b/common/unsafe/src/main/java/org/apache/spark/unsafe/memory/HeapMemoryAllocator.java @@ -82,7 +82,8 @@ public class HeapMemoryAllocator implements MemoryAllocator { "page has already been freed"; assert ((memory.pageNumber == MemoryBlock.NO_PAGE_NUMBER) || (memory.pageNumber == MemoryBlock.FREED_IN_TMM_PAGE_NUMBER)) : - "TMM-allocated pages must first be freed via TMM.freePage(), not directly in allocator free()"; + "TMM-allocated pages must first be freed via TMM.freePage(), not directly in allocator " + +"free()"; final long size = memory.size(); if (MemoryAllocator.MEMORY_DEBUG_FILL_ENABLED) { 
http://git-wip-us.apache.org/repos/asf/spark/blob/7bd14cfd/launcher/src/main/java/org/apache/spark/launcher/InProcessAppHandle.java -- diff --git a/launcher/src/main/java/org/apache/spark/launcher/InProcessAppHandle.java b/launcher/src/main/java/org/apache/spark/launcher/InProcessAppHandle.java index 0d6a73a..acd64c9 100644 --- a/launcher/src/main/java/org/apache/spark/launcher/InProcessAppHandle.java +++ b/launcher/src/main/java/org/apache/spark/launcher/InProcessAppHandle.java @@ -17,7 +17,6 @@ package org.apache.spark.launcher; -import java.io.IOException; import java.lang.reflect.Method; import java.util.concurrent.atomic.AtomicLong; import java.util.logging.Level; http://git-wip-us.apache.org/repos/asf/spark/blob/7bd14cfd/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVector.java -- diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVector.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVector.java index f94c55d..b6e7922 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVector.java +++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVector.java @@ -38,7 +38,7 @@ public class OrcColumnVector extends org.apache.spark.sql.vectorized.ColumnVecto private BytesColumnVector
spark git commit: [MINOR][BUILD] Fix Java linter errors
Repository: spark Updated Branches: refs/heads/master 287781742 -> 5536f3181 [MINOR][BUILD] Fix Java linter errors ## What changes were proposed in this pull request? This PR cleans up a few Java linter errors for Apache Spark 2.3 release. ## How was this patch tested? ```bash $ dev/lint-java Using `mvn` from path: /usr/local/bin/mvn Checkstyle checks passed. ``` We can see the result from [Travis CI](https://travis-ci.org/dongjoon-hyun/spark/builds/322470787), too. Author: Dongjoon HyunCloses #20101 from dongjoon-hyun/fix-java-lint. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/5536f318 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/5536f318 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/5536f318 Branch: refs/heads/master Commit: 5536f3181c1e77c70f01d6417407d218ea48b961 Parents: 2877817 Author: Dongjoon Hyun Authored: Thu Dec 28 09:43:50 2017 -0600 Committer: Sean Owen Committed: Thu Dec 28 09:43:50 2017 -0600 -- .../java/org/apache/spark/memory/MemoryConsumer.java | 3 ++- .../streaming/kinesis/KinesisInitialPositions.java| 14 -- .../datasources/parquet/VectorizedColumnReader.java | 3 +-- .../parquet/VectorizedParquetRecordReader.java| 4 ++-- .../spark/sql/execution/vectorized/ColumnarRow.java | 3 ++- .../spark/sql/sources/v2/SessionConfigSupport.java| 3 --- .../sources/v2/streaming/ContinuousReadSupport.java | 5 - .../sources/v2/streaming/ContinuousWriteSupport.java | 6 +++--- .../spark/sql/sources/v2/streaming/reader/Offset.java | 3 ++- .../sources/v2/streaming/reader/PartitionOffset.java | 1 - .../hive/service/cli/operation/SQLOperation.java | 1 - 11 files changed, 24 insertions(+), 22 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/5536f318/core/src/main/java/org/apache/spark/memory/MemoryConsumer.java -- diff --git a/core/src/main/java/org/apache/spark/memory/MemoryConsumer.java b/core/src/main/java/org/apache/spark/memory/MemoryConsumer.java index 
a7bd4b3..115e1fb 100644 --- a/core/src/main/java/org/apache/spark/memory/MemoryConsumer.java +++ b/core/src/main/java/org/apache/spark/memory/MemoryConsumer.java @@ -154,6 +154,7 @@ public abstract class MemoryConsumer { taskMemoryManager.freePage(page, this); } taskMemoryManager.showMemoryUsage(); -throw new SparkOutOfMemoryError("Unable to acquire " + required + " bytes of memory, got " + got); +throw new SparkOutOfMemoryError("Unable to acquire " + required + " bytes of memory, got " + + got); } } http://git-wip-us.apache.org/repos/asf/spark/blob/5536f318/external/kinesis-asl/src/main/java/org/apache/spark/streaming/kinesis/KinesisInitialPositions.java -- diff --git a/external/kinesis-asl/src/main/java/org/apache/spark/streaming/kinesis/KinesisInitialPositions.java b/external/kinesis-asl/src/main/java/org/apache/spark/streaming/kinesis/KinesisInitialPositions.java index 206e1e4..b5f5ab0 100644 --- a/external/kinesis-asl/src/main/java/org/apache/spark/streaming/kinesis/KinesisInitialPositions.java +++ b/external/kinesis-asl/src/main/java/org/apache/spark/streaming/kinesis/KinesisInitialPositions.java @@ -67,9 +67,10 @@ public class KinesisInitialPositions { /** - * Returns instance of [[KinesisInitialPosition]] based on the passed [[InitialPositionInStream]]. - * This method is used in KinesisUtils for translating the InitialPositionInStream - * to InitialPosition. This function would be removed when we deprecate the KinesisUtils. + * Returns instance of [[KinesisInitialPosition]] based on the passed + * [[InitialPositionInStream]]. This method is used in KinesisUtils for translating the + * InitialPositionInStream to InitialPosition. This function would be removed when we deprecate + * the KinesisUtils. * * @return [[InitialPosition]] */ @@ -83,9 +84,10 @@ public class KinesisInitialPositions { // InitialPositionInStream.AT_TIMESTAMP is not supported. // Use InitialPosition.atTimestamp(timestamp) instead. 
throw new UnsupportedOperationException( -"Only InitialPositionInStream.LATEST and InitialPositionInStream.TRIM_HORIZON " + -"supported in initialPositionInStream(). Please use the initialPosition() from " + -"builder API in KinesisInputDStream for using InitialPositionInStream.AT_TIMESTAMP"); +"Only InitialPositionInStream.LATEST and InitialPositionInStream." + +
spark git commit: [MINOR][BUILD] Fix Java linter errors
Repository: spark Updated Branches: refs/heads/branch-2.2 7b50736c4 -> e329beaff [MINOR][BUILD] Fix Java linter errors This PR cleans up a few Java linter errors for Apache Spark 2.2 release. ```bash $ dev/lint-java Using `mvn` from path: /usr/local/bin/mvn Checkstyle checks passed. ``` We can check the result at Travis CI, [here](https://travis-ci.org/dongjoon-hyun/spark/builds/244297894). Author: Dongjoon HyunCloses #18345 from dongjoon-hyun/fix_lint_java_2. (cherry picked from commit ecc5631351e81bbee4befb213f3053a4f31532a7) Signed-off-by: Sean Owen Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e329beaf Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e329beaf Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e329beaf Branch: refs/heads/branch-2.2 Commit: e329beaffe3d7691d63799cd1de2b30b990543c7 Parents: 7b50736 Author: Dongjoon Hyun Authored: Mon Jun 19 20:17:54 2017 +0100 Committer: Sean Owen Committed: Mon Jun 19 20:33:57 2017 +0100 -- .../apache/spark/network/shuffle/OneForOneBlockFetcher.java| 2 +- .../org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java | 5 +++-- .../main/java/org/apache/spark/examples/ml/JavaALSExample.java | 2 +- .../apache/spark/examples/sql/JavaSQLDataSourceExample.java| 6 +- .../main/java/org/apache/spark/sql/streaming/OutputMode.java | 1 - 5 files changed, 10 insertions(+), 6 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/e329beaf/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockFetcher.java -- diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockFetcher.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockFetcher.java index 5f42875..d46ce2e 100644 --- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockFetcher.java +++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/OneForOneBlockFetcher.java @@ -157,7 +157,7 @@ public class OneForOneBlockFetcher { private File targetFile = null; private int chunkIndex; -public DownloadCallback(File targetFile, int chunkIndex) throws IOException { +DownloadCallback(File targetFile, int chunkIndex) throws IOException { this.targetFile = targetFile; this.channel = Channels.newChannel(new FileOutputStream(targetFile)); this.chunkIndex = chunkIndex; http://git-wip-us.apache.org/repos/asf/spark/blob/e329beaf/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java -- diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java index 2fde5c3..6c19c27 100644 --- a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java +++ b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java @@ -353,8 +353,9 @@ public class UnsafeShuffleWriter extends ShuffleWriter { } for (int partition = 0; partition < numPartitions; partition++) { final long initialFileLength = mergedFileOutputStream.getByteCount(); -// Shield the underlying output stream from close() calls, so that we can close the higher -// level streams to make sure all data is really flushed and internal state is cleaned. +// Shield the underlying output stream from close() calls, so that we can close +// the higher level streams to make sure all data is really flushed and internal state is +// cleaned. 
OutputStream partitionOutput = new CloseShieldOutputStream( new TimeTrackingOutputStream(writeMetrics, mergedFileOutputStream)); partitionOutput = blockManager.serializerManager().wrapForEncryption(partitionOutput); http://git-wip-us.apache.org/repos/asf/spark/blob/e329beaf/examples/src/main/java/org/apache/spark/examples/ml/JavaALSExample.java -- diff --git a/examples/src/main/java/org/apache/spark/examples/ml/JavaALSExample.java b/examples/src/main/java/org/apache/spark/examples/ml/JavaALSExample.java index 60ef03d..fe4d6bc 100644 --- a/examples/src/main/java/org/apache/spark/examples/ml/JavaALSExample.java +++ b/examples/src/main/java/org/apache/spark/examples/ml/JavaALSExample.java @@ -121,7 +121,7 @@ public class JavaALSExample { // $example off$ userRecs.show(); movieRecs.show(); - + spark.stop(); } }
spark git commit: [MINOR][BUILD] Fix Java linter errors
Repository: spark Updated Branches: refs/heads/master e5387018e -> ecc563135 [MINOR][BUILD] Fix Java linter errors ## What changes were proposed in this pull request? This PR cleans up a few Java linter errors for Apache Spark 2.2 release. ## How was this patch tested? ```bash $ dev/lint-java Using `mvn` from path: /usr/local/bin/mvn Checkstyle checks passed. ``` We can check the result at Travis CI, [here](https://travis-ci.org/dongjoon-hyun/spark/builds/244297894). Author: Dongjoon HyunCloses #18345 from dongjoon-hyun/fix_lint_java_2. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/ecc56313 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/ecc56313 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/ecc56313 Branch: refs/heads/master Commit: ecc5631351e81bbee4befb213f3053a4f31532a7 Parents: e538701 Author: Dongjoon Hyun Authored: Mon Jun 19 20:17:54 2017 +0100 Committer: Sean Owen Committed: Mon Jun 19 20:17:54 2017 +0100 -- .../src/main/java/org/apache/spark/kvstore/KVIndex.java | 2 +- .../src/main/java/org/apache/spark/kvstore/KVStore.java | 7 ++- .../src/main/java/org/apache/spark/kvstore/KVStoreView.java | 3 --- .../src/main/java/org/apache/spark/kvstore/KVTypeInfo.java | 2 -- .../src/main/java/org/apache/spark/kvstore/LevelDB.java | 1 - .../main/java/org/apache/spark/kvstore/LevelDBIterator.java | 1 - .../main/java/org/apache/spark/kvstore/LevelDBTypeInfo.java | 5 - .../test/java/org/apache/spark/kvstore/DBIteratorSuite.java | 4 +--- .../src/test/java/org/apache/spark/kvstore/LevelDBSuite.java | 2 -- .../apache/spark/network/shuffle/OneForOneBlockFetcher.java | 2 +- .../org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java | 8 +--- .../java/org/apache/spark/examples/ml/JavaALSExample.java| 2 +- .../apache/spark/examples/sql/JavaSQLDataSourceExample.java | 6 +- .../main/java/org/apache/spark/sql/streaming/OutputMode.java | 1 - 14 files changed, 16 insertions(+), 30 deletions(-) 
-- http://git-wip-us.apache.org/repos/asf/spark/blob/ecc56313/common/kvstore/src/main/java/org/apache/spark/kvstore/KVIndex.java -- diff --git a/common/kvstore/src/main/java/org/apache/spark/kvstore/KVIndex.java b/common/kvstore/src/main/java/org/apache/spark/kvstore/KVIndex.java index 8b88990..0cffefe 100644 --- a/common/kvstore/src/main/java/org/apache/spark/kvstore/KVIndex.java +++ b/common/kvstore/src/main/java/org/apache/spark/kvstore/KVIndex.java @@ -50,7 +50,7 @@ import java.lang.annotation.Target; @Target({ElementType.FIELD, ElementType.METHOD}) public @interface KVIndex { - public static final String NATURAL_INDEX_NAME = "__main__"; + String NATURAL_INDEX_NAME = "__main__"; /** * The name of the index to be created for the annotated entity. Must be unique within http://git-wip-us.apache.org/repos/asf/spark/blob/ecc56313/common/kvstore/src/main/java/org/apache/spark/kvstore/KVStore.java -- diff --git a/common/kvstore/src/main/java/org/apache/spark/kvstore/KVStore.java b/common/kvstore/src/main/java/org/apache/spark/kvstore/KVStore.java index 3be4b82..c7808ea 100644 --- a/common/kvstore/src/main/java/org/apache/spark/kvstore/KVStore.java +++ b/common/kvstore/src/main/java/org/apache/spark/kvstore/KVStore.java @@ -18,9 +18,6 @@ package org.apache.spark.kvstore; import java.io.Closeable; -import java.util.Iterator; -import java.util.Map; -import java.util.NoSuchElementException; /** * Abstraction for a local key/value store for storing app data. @@ -84,7 +81,7 @@ public interface KVStore extends Closeable { * * @param naturalKey The object's "natural key", which uniquely identifies it. Null keys * are not allowed. - * @throws NoSuchElementException If an element with the given key does not exist. + * @throws java.util.NoSuchElementException If an element with the given key does not exist. */ T read(Class klass, Object naturalKey) throws Exception; @@ -107,7 +104,7 @@ public interface KVStore extends Closeable { * @param type The object's type. 
* @param naturalKey The object's "natural key", which uniquely identifies it. Null keys * are not allowed. - * @throws NoSuchElementException If an element with the given key does not exist. + * @throws java.util.NoSuchElementException If an element with the given key does not exist. */ void delete(Class type, Object naturalKey) throws Exception; http://git-wip-us.apache.org/repos/asf/spark/blob/ecc56313/common/kvstore/src/main/java/org/apache/spark/kvstore/KVStoreView.java
spark git commit: [MINOR][BUILD] Fix Java linter errors
Repository: spark Updated Branches: refs/heads/branch-2.0 0d0b41609 -> 0c6fd03fa [MINOR][BUILD] Fix Java linter errors This PR fixes the minor Java linter errors like the following. ``` -public int read(char cbuf[], int off, int len) throws IOException { +public int read(char[] cbuf, int off, int len) throws IOException { ``` Manual. ``` $ build/mvn -T 4 -q -DskipTests -Pyarn -Phadoop-2.3 -Pkinesis-asl -Phive -Phive-thriftserver install $ dev/lint-java Using `mvn` from path: /usr/local/bin/mvn Checkstyle checks passed. ``` Author: Dongjoon HyunCloses #14017 from dongjoon-hyun/minor_build_java_linter_error. (cherry picked from commit 3000b4b29f9165f436f186a8c1ba818e24f90615) Signed-off-by: Sean Owen Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/0c6fd03f Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/0c6fd03f Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/0c6fd03f Branch: refs/heads/branch-2.0 Commit: 0c6fd03fa763df4afb77ac4738c76f0b73e46ad0 Parents: 0d0b416 Author: Dongjoon Hyun Authored: Sat Jul 2 16:31:06 2016 +0100 Committer: Sean Owen Committed: Sat Jul 2 16:33:22 2016 +0100 -- .../spark/shuffle/sort/ShuffleExternalSorter.java | 3 ++- .../collection/unsafe/sort/UnsafeExternalSorter.java| 12 ++-- .../spark/sql/execution/UnsafeExternalRowSorter.java| 4 ++-- .../sql/execution/UnsafeFixedWidthAggregationMap.java | 4 ++-- .../spark/sql/execution/UnsafeKVExternalSorter.java | 3 ++- 5 files changed, 14 insertions(+), 12 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/0c6fd03f/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java -- diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java index 696ee73..cf38a04 100644 --- a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java +++ 
b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java @@ -376,7 +376,8 @@ final class ShuffleExternalSorter extends MemoryConsumer { // for tests assert(inMemSorter != null); if (inMemSorter.numRecords() >= numElementsForSpillThreshold) { - logger.info("Spilling data because number of spilledRecords crossed the threshold " + numElementsForSpillThreshold); + logger.info("Spilling data because number of spilledRecords crossed the threshold " + +numElementsForSpillThreshold); spill(); } http://git-wip-us.apache.org/repos/asf/spark/blob/0c6fd03f/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java -- diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java index 8a980d4..50f5b06 100644 --- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java +++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java @@ -27,7 +27,6 @@ import com.google.common.annotations.VisibleForTesting; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.spark.SparkEnv; import org.apache.spark.TaskContext; import org.apache.spark.executor.ShuffleWriteMetrics; import org.apache.spark.memory.MemoryConsumer; @@ -99,8 +98,8 @@ public final class UnsafeExternalSorter extends MemoryConsumer { long numElementsForSpillThreshold, UnsafeInMemorySorter inMemorySorter) throws IOException { UnsafeExternalSorter sorter = new UnsafeExternalSorter(taskMemoryManager, blockManager, - serializerManager, taskContext, recordComparator, prefixComparator, initialSize, numElementsForSpillThreshold, -pageSizeBytes, inMemorySorter, false /* ignored */); + serializerManager, taskContext, recordComparator, prefixComparator, initialSize, +numElementsForSpillThreshold, pageSizeBytes, inMemorySorter, false /* ignored */); 
sorter.spill(Long.MAX_VALUE, sorter); // The external sorter will be used to insert records, in-memory sorter is not needed. sorter.inMemSorter = null; @@ -119,8 +118,8 @@ public final class UnsafeExternalSorter extends MemoryConsumer { long numElementsForSpillThreshold, boolean canUseRadixSort) { return new UnsafeExternalSorter(taskMemoryManager, blockManager, serializerManager, - taskContext, recordComparator, prefixComparator, initialSize, pageSizeBytes,
spark git commit: [MINOR][BUILD] Fix Java linter errors
Repository: spark Updated Branches: refs/heads/master 0bd7cd18b -> 3000b4b29 [MINOR][BUILD] Fix Java linter errors ## What changes were proposed in this pull request? This PR fixes the minor Java linter errors like the following. ``` -public int read(char cbuf[], int off, int len) throws IOException { +public int read(char[] cbuf, int off, int len) throws IOException { ``` ## How was this patch tested? Manual. ``` $ build/mvn -T 4 -q -DskipTests -Pyarn -Phadoop-2.3 -Pkinesis-asl -Phive -Phive-thriftserver install $ dev/lint-java Using `mvn` from path: /usr/local/bin/mvn Checkstyle checks passed. ``` Author: Dongjoon HyunCloses #14017 from dongjoon-hyun/minor_build_java_linter_error. Project: http://git-wip-us.apache.org/repos/asf/spark/repo Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/3000b4b2 Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/3000b4b2 Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/3000b4b2 Branch: refs/heads/master Commit: 3000b4b29f9165f436f186a8c1ba818e24f90615 Parents: 0bd7cd1 Author: Dongjoon Hyun Authored: Sat Jul 2 16:31:06 2016 +0100 Committer: Sean Owen Committed: Sat Jul 2 16:31:06 2016 +0100 -- .../shuffle/sort/ShuffleExternalSorter.java | 3 ++- .../unsafe/sort/UnsafeExternalSorter.java| 12 ++-- .../catalyst/expressions/xml/UDFXPathUtil.java | 19 +++ .../sql/execution/UnsafeExternalRowSorter.java | 4 ++-- .../UnsafeFixedWidthAggregationMap.java | 4 ++-- .../sql/execution/UnsafeKVExternalSorter.java| 3 ++- 6 files changed, 25 insertions(+), 20 deletions(-) -- http://git-wip-us.apache.org/repos/asf/spark/blob/3000b4b2/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java -- diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java index 696ee73..cf38a04 100644 --- a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java +++ 
b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java @@ -376,7 +376,8 @@ final class ShuffleExternalSorter extends MemoryConsumer { // for tests assert(inMemSorter != null); if (inMemSorter.numRecords() >= numElementsForSpillThreshold) { - logger.info("Spilling data because number of spilledRecords crossed the threshold " + numElementsForSpillThreshold); + logger.info("Spilling data because number of spilledRecords crossed the threshold " + +numElementsForSpillThreshold); spill(); } http://git-wip-us.apache.org/repos/asf/spark/blob/3000b4b2/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java -- diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java index d6a255e..8d596f8 100644 --- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java +++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java @@ -27,7 +27,6 @@ import com.google.common.annotations.VisibleForTesting; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.apache.spark.SparkEnv; import org.apache.spark.TaskContext; import org.apache.spark.executor.ShuffleWriteMetrics; import org.apache.spark.memory.MemoryConsumer; @@ -99,8 +98,8 @@ public final class UnsafeExternalSorter extends MemoryConsumer { long numElementsForSpillThreshold, UnsafeInMemorySorter inMemorySorter) throws IOException { UnsafeExternalSorter sorter = new UnsafeExternalSorter(taskMemoryManager, blockManager, - serializerManager, taskContext, recordComparator, prefixComparator, initialSize, numElementsForSpillThreshold, -pageSizeBytes, inMemorySorter, false /* ignored */); + serializerManager, taskContext, recordComparator, prefixComparator, initialSize, +numElementsForSpillThreshold, pageSizeBytes, inMemorySorter, false /* ignored */); 
sorter.spill(Long.MAX_VALUE, sorter); // The external sorter will be used to insert records, in-memory sorter is not needed. sorter.inMemSorter = null; @@ -119,8 +118,8 @@ public final class UnsafeExternalSorter extends MemoryConsumer { long numElementsForSpillThreshold, boolean canUseRadixSort) { return new UnsafeExternalSorter(taskMemoryManager, blockManager, serializerManager, - taskContext, recordComparator, prefixComparator, initialSize, pageSizeBytes,