Repository: spark
Updated Branches:
  refs/heads/master 0fa5b7cac -> b6ef1f57b


[SPARK-21970][CORE] Fix Redundant Throws Declarations in Java Codebase

## What changes were proposed in this pull request?

1. Removing all redundant throws declarations from the Java codebase.
2. Removing dead code made visible by this from `ShuffleExternalSorter#closeAndGetSpills`.

## How was this patch tested?

Build still passes.

Author: Armin <m...@obrown.io>

Closes #19182 from original-brownbear/SPARK-21970.
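To make the pattern concrete, here is a minimal before/after sketch of a redundant throws declaration (illustrative only, not code from this commit): a checked exception is declared on a method whose body can never throw it, forcing every caller to catch or re-declare it for nothing.

```java
import java.io.IOException;

class Before {
  // Redundant: nothing in this body can throw IOException, yet every
  // caller must still catch it or propagate it in its own signature.
  static int parsePort(String s) throws IOException {
    return Integer.parseInt(s);
  }
}

class After {
  // Same body, honest signature: callers compile without a try/catch.
  static int parsePort(String s) {
    return Integer.parseInt(s);
  }
}
```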
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/b6ef1f57
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/b6ef1f57
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/b6ef1f57

Branch: refs/heads/master
Commit: b6ef1f57bc06a0b213b0367229a09b5094267d80
Parents: 0fa5b7c
Author: Armin <m...@obrown.io>
Authored: Wed Sep 13 14:04:26 2017 +0100
Committer: Sean Owen <so...@cloudera.com>
Committed: Wed Sep 13 14:04:26 2017 +0100

----------------------------------------------------------------------
 .../spark/util/kvstore/LevelDBTypeInfo.java     |  8 +++----
 .../apache/spark/network/crypto/AuthEngine.java |  4 ++--
 .../spark/network/sasl/SparkSaslClient.java     |  3 +--
 .../spark/network/sasl/SparkSaslServer.java     |  3 +--
 .../spark/network/server/TransportServer.java   |  2 +-
 .../network/util/TransportFrameDecoder.java     |  2 +-
 .../spark/util/sketch/CountMinSketchImpl.java   |  2 +-
 .../org/apache/spark/memory/MemoryConsumer.java |  2 --
 .../shuffle/sort/ShuffleExternalSorter.java     | 23 ++++++++------------
 .../spark/shuffle/sort/UnsafeShuffleWriter.java |  2 +-
 .../unsafe/sort/UnsafeExternalSorter.java       |  2 +-
 .../unsafe/sort/UnsafeSorterSpillWriter.java    |  4 ++--
 .../streaming/JavaStructuredSessionization.java |  7 +++---
 .../apache/spark/launcher/SparkLauncher.java    |  2 +-
 .../parquet/VectorizedColumnReader.java         | 17 +++++++--------
 15 files changed, 36 insertions(+), 47 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/common/kvstore/src/main/java/org/apache/spark/util/kvstore/LevelDBTypeInfo.java
----------------------------------------------------------------------
diff --git a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/LevelDBTypeInfo.java b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/LevelDBTypeInfo.java
index 93aa0bb..232ee41 100644
--- a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/LevelDBTypeInfo.java
+++ b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/LevelDBTypeInfo.java
@@ -249,7 +249,7 @@ class LevelDBTypeInfo {
      * calculated only once, avoiding redundant work when multiple child indices of the
      * same parent index exist.
      */
-    byte[] childPrefix(Object value) throws Exception {
+    byte[] childPrefix(Object value) {
       Preconditions.checkState(parent == null, "Not a parent index.");
       return buildKey(name, toParentKey(value));
     }
@@ -295,7 +295,7 @@ class LevelDBTypeInfo {
     }
 
     /** The key for the end marker for entries with the given value. */
-    byte[] end(byte[] prefix, Object value) throws Exception {
+    byte[] end(byte[] prefix, Object value) {
       checkParent(prefix);
       return (parent != null) ?
         buildKey(false, prefix, name, toKey(value), END_MARKER) :
         buildKey(name, toKey(value), END_MARKER);
@@ -313,7 +313,7 @@ class LevelDBTypeInfo {
       return entityKey;
     }
 
-    private void updateCount(WriteBatch batch, byte[] key, long delta) throws Exception {
+    private void updateCount(WriteBatch batch, byte[] key, long delta) {
       long updated = getCount(key) + delta;
       if (updated > 0) {
         batch.put(key, db.serializer.serialize(updated));
@@ -431,7 +431,7 @@ class LevelDBTypeInfo {
       addOrRemove(batch, entity, null, null, naturalKey, prefix);
     }
 
-    long getCount(byte[] key) throws Exception {
+    long getCount(byte[] key) {
       byte[] data = db.db().get(key);
       return data != null ? db.serializer.deserializeLong(data) : 0;
     }

http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthEngine.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthEngine.java b/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthEngine.java
index b769ebe..056505e 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthEngine.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthEngine.java
@@ -81,7 +81,7 @@ class AuthEngine implements Closeable {
    *
    * @return A challenge to be sent the remote side.
    */
-  ClientChallenge challenge() throws GeneralSecurityException, IOException {
+  ClientChallenge challenge() throws GeneralSecurityException {
     this.authNonce = randomBytes(conf.encryptionKeyLength() / Byte.SIZE);
     SecretKeySpec authKey = generateKey(conf.keyFactoryAlgorithm(), conf.keyFactoryIterations(),
       authNonce, conf.encryptionKeyLength());
@@ -105,7 +105,7 @@ class AuthEngine implements Closeable {
    * @return A response to be sent to the client.
    */
   ServerResponse respond(ClientChallenge clientChallenge)
-      throws GeneralSecurityException, IOException {
+      throws GeneralSecurityException {
 
     SecretKeySpec authKey = generateKey(clientChallenge.kdf, clientChallenge.iterations,
       clientChallenge.nonce, clientChallenge.keyLength);

http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java
index b6256de..05a5afe 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java
@@ -17,7 +17,6 @@
 
 package org.apache.spark.network.sasl;
 
-import java.io.IOException;
 import java.util.Map;
 import javax.security.auth.callback.Callback;
 import javax.security.auth.callback.CallbackHandler;
@@ -125,7 +124,7 @@ public class SparkSaslClient implements SaslEncryptionBackend {
    */
   private class ClientCallbackHandler implements CallbackHandler {
     @Override
-    public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
+    public void handle(Callback[] callbacks) throws UnsupportedCallbackException {
       for (Callback callback : callbacks) {
         if (callback instanceof NameCallback) {

http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java
index 00f3e83..e22e09d 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java
@@ -27,7 +27,6 @@ import javax.security.sasl.RealmCallback;
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
 import javax.security.sasl.SaslServer;
-import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.Map;
 
@@ -155,7 +154,7 @@ public class SparkSaslServer implements SaslEncryptionBackend {
    */
   private class DigestCallbackHandler implements CallbackHandler {
     @Override
-    public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
+    public void handle(Callback[] callbacks) throws UnsupportedCallbackException {
       for (Callback callback : callbacks) {
         if (callback instanceof NameCallback) {
           logger.trace("SASL server callback: setting username");
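The two SASL callback handlers above rely on a general Java rule: an overriding method may declare fewer checked exceptions than the method it overrides. `CallbackHandler.handle` is declared `throws IOException, UnsupportedCallbackException`, so an implementation that performs no I/O can legally drop `IOException`. A self-contained sketch of the same narrowing (hypothetical class, not Spark code):

```java
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.UnsupportedCallbackException;

// Narrows the interface's "throws IOException, UnsupportedCallbackException"
// to just UnsupportedCallbackException, which is valid because the body
// never performs I/O.
class NarrowingHandler implements CallbackHandler {
  @Override
  public void handle(Callback[] callbacks) throws UnsupportedCallbackException {
    for (Callback cb : callbacks) {
      if (cb instanceof NameCallback) {
        ((NameCallback) cb).setName("user");
      } else {
        throw new UnsupportedCallbackException(cb, "Unrecognized SASL callback");
      }
    }
  }
}
```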
http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java b/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java
index 3f1fea8..0719fa76 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/server/TransportServer.java
@@ -117,7 +117,7 @@ public class TransportServer implements Closeable {
 
     bootstrap.childHandler(new ChannelInitializer<SocketChannel>() {
       @Override
-      protected void initChannel(SocketChannel ch) throws Exception {
+      protected void initChannel(SocketChannel ch) {
         RpcHandler rpcHandler = appRpcHandler;
         for (TransportServerBootstrap bootstrap : bootstraps) {
           rpcHandler = bootstrap.doBootstrap(ch, rpcHandler);

http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
----------------------------------------------------------------------
diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java b/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
index fcec7df..50d9651 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/util/TransportFrameDecoder.java
@@ -121,7 +121,7 @@ public class TransportFrameDecoder extends ChannelInboundHandlerAdapter {
     return nextFrameSize;
   }
 
-  private ByteBuf decodeNext() throws Exception {
+  private ByteBuf decodeNext() {
     long frameSize = decodeFrameSize();
     if (frameSize == UNKNOWN_FRAME_SIZE || totalSize < frameSize) {
       return null;

http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/common/sketch/src/main/java/org/apache/spark/util/sketch/CountMinSketchImpl.java
----------------------------------------------------------------------
diff --git a/common/sketch/src/main/java/org/apache/spark/util/sketch/CountMinSketchImpl.java b/common/sketch/src/main/java/org/apache/spark/util/sketch/CountMinSketchImpl.java
index 045fec3..fd1906d 100644
--- a/common/sketch/src/main/java/org/apache/spark/util/sketch/CountMinSketchImpl.java
+++ b/common/sketch/src/main/java/org/apache/spark/util/sketch/CountMinSketchImpl.java
@@ -365,7 +365,7 @@ class CountMinSketchImpl extends CountMinSketch implements Serializable {
     this.writeTo(out);
   }
 
-  private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
+  private void readObject(ObjectInputStream in) throws IOException {
     this.readFrom0(in);
   }
 }
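`readObject` above is not an override: `ObjectInputStream` locates it reflectively during deserialization, so its throws clause only needs to cover what the body can actually throw. Since `readFrom0` reads primitives and declares only `IOException`, `ClassNotFoundException` could be dropped. A standalone sketch of the same hook (hypothetical class, not the Spark implementation):

```java
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;

class PrimitiveTable implements Serializable {
  private transient long[] table = new long[0];

  private void writeObject(ObjectOutputStream out) throws IOException {
    out.writeInt(table.length);
    for (long v : table) {
      out.writeLong(v);
    }
  }

  // Only primitive reads happen here, so ClassNotFoundException cannot
  // arise and need not be declared.
  private void readObject(ObjectInputStream in) throws IOException {
    table = new long[in.readInt()];
    for (int i = 0; i < table.length; i++) {
      table[i] = in.readLong();
    }
  }
}
```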
http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/core/src/main/java/org/apache/spark/memory/MemoryConsumer.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/memory/MemoryConsumer.java b/core/src/main/java/org/apache/spark/memory/MemoryConsumer.java
index 48cf4b9..4099fb0 100644
--- a/core/src/main/java/org/apache/spark/memory/MemoryConsumer.java
+++ b/core/src/main/java/org/apache/spark/memory/MemoryConsumer.java
@@ -111,8 +111,6 @@ public abstract class MemoryConsumer {
   /**
    * Allocate a memory block with at least `required` bytes.
    *
-   * Throws IOException if there is not enough memory.
-   *
    * @throws OutOfMemoryError
    */
   protected MemoryBlock allocatePage(long required) {

http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
index da6c55d..b4f4630 100644
--- a/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
+++ b/core/src/main/java/org/apache/spark/shuffle/sort/ShuffleExternalSorter.java
@@ -140,7 +140,7 @@ final class ShuffleExternalSorter extends MemoryConsumer {
    * bytes written should be counted towards shuffle spill metrics rather than
    * shuffle write metrics.
    */
-  private void writeSortedFile(boolean isLastFile) throws IOException {
+  private void writeSortedFile(boolean isLastFile) {
 
     final ShuffleWriteMetrics writeMetricsToUse;
 
@@ -325,7 +325,7 @@ final class ShuffleExternalSorter extends MemoryConsumer {
    * array and grows the array if additional space is required. If the required space cannot be
    * obtained, then the in-memory data will be spilled to disk.
    */
-  private void growPointerArrayIfNecessary() throws IOException {
+  private void growPointerArrayIfNecessary() {
     assert(inMemSorter != null);
     if (!inMemSorter.hasSpaceForAnotherRecord()) {
       long used = inMemSorter.getMemoryUsage();
@@ -406,19 +406,14 @@ final class ShuffleExternalSorter extends MemoryConsumer {
    * @throws IOException
    */
   public SpillInfo[] closeAndGetSpills() throws IOException {
-    try {
-      if (inMemSorter != null) {
-        // Do not count the final file towards the spill count.
-        writeSortedFile(true);
-        freeMemory();
-        inMemSorter.free();
-        inMemSorter = null;
-      }
-      return spills.toArray(new SpillInfo[spills.size()]);
-    } catch (IOException e) {
-      cleanupResources();
-      throw e;
+    if (inMemSorter != null) {
+      // Do not count the final file towards the spill count.
+      writeSortedFile(true);
+      freeMemory();
+      inMemSorter.free();
+      inMemSorter = null;
     }
+    return spills.toArray(new SpillInfo[spills.size()]);
   }
 }
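This hunk is the "dead code made visible" part of the change: once `writeSortedFile` stops declaring `IOException`, the old `catch (IOException e)` block is not merely dead, it is rejected by javac, since catching a checked exception that the try body can never throw is a compile-time error. A toy reproduction (hypothetical names, not Spark code):

```java
class DeadCatchDemo {
  // After the cleanup, this method no longer declares IOException.
  static void writeSortedFile() { /* pure in-memory work */ }

  static void closeAndGetSpills() {
    // The old wrapper would now fail to compile:
    //
    //   try {
    //     writeSortedFile();
    //   } catch (java.io.IOException e) {  // error: exception IOException
    //     throw e;                         // is never thrown in body of
    //   }                                  // corresponding try statement
    //
    writeSortedFile();
  }
}
```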
http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
index c0ebe3c..e9c2a69 100644
--- a/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
+++ b/core/src/main/java/org/apache/spark/shuffle/sort/UnsafeShuffleWriter.java
@@ -208,7 +208,7 @@ public class UnsafeShuffleWriter<K, V> extends ShuffleWriter<K, V> {
     }
   }
 
-  private void open() throws IOException {
+  private void open() {
     assert (sorter == null);
     sorter = new ShuffleExternalSorter(
       memoryManager,

http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
index e2059ce..de44640 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
@@ -346,7 +346,7 @@ public final class UnsafeExternalSorter extends MemoryConsumer {
    * array and grows the array if additional space is required. If the required space cannot be
    * obtained, then the in-memory data will be spilled to disk.
    */
-  private void growPointerArrayIfNecessary() throws IOException {
+  private void growPointerArrayIfNecessary() {
     assert(inMemSorter != null);
     if (!inMemSorter.hasSpaceForAnotherRecord()) {
       long used = inMemSorter.getMemoryUsage();

http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java
index 850f247..9399024 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeSorterSpillWriter.java
@@ -79,7 +79,7 @@ public final class UnsafeSorterSpillWriter {
   }
 
   // Based on DataOutputStream.writeLong.
-  private void writeLongToBuffer(long v, int offset) throws IOException {
+  private void writeLongToBuffer(long v, int offset) {
     writeBuffer[offset + 0] = (byte)(v >>> 56);
     writeBuffer[offset + 1] = (byte)(v >>> 48);
     writeBuffer[offset + 2] = (byte)(v >>> 40);
@@ -91,7 +91,7 @@ public final class UnsafeSorterSpillWriter {
   }
 
   // Based on DataOutputStream.writeInt.
-  private void writeIntToBuffer(int v, int offset) throws IOException {
+  private void writeIntToBuffer(int v, int offset) {
     writeBuffer[offset + 0] = (byte)(v >>> 24);
     writeBuffer[offset + 1] = (byte)(v >>> 16);
     writeBuffer[offset + 2] = (byte)(v >>> 8);
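These two helpers could shed `throws IOException` because, unlike the `DataOutputStream.writeLong`/`writeInt` methods they mirror, they only shift bytes into an in-memory array and never touch a stream. A standalone sketch of the same big-endian encoding (hypothetical names):

```java
// Big-endian encoding into a plain byte[]: no stream involved,
// so no IOException is possible.
final class BigEndian {
  static void putLong(byte[] buf, int offset, long v) {
    for (int i = 0; i < 8; i++) {
      buf[offset + i] = (byte) (v >>> (56 - 8 * i));
    }
  }

  static void putInt(byte[] buf, int offset, int v) {
    for (int i = 0; i < 4; i++) {
      buf[offset + i] = (byte) (v >>> (24 - 8 * i));
    }
  }

  public static void main(String[] args) {
    byte[] buf = new byte[8];
    putLong(buf, 0, 0x0102030405060708L);
    System.out.println(buf[0] == 0x01 && buf[7] == 0x08);  // prints true
  }
}
```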
"spark-submit.cmd" : "spark-submit"; return join(File.separator, builder.getSparkHome(), "bin", script); } http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java ---------------------------------------------------------------------- diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java index 2173bbc..3c8d766 100644 --- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java +++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java @@ -350,14 +350,13 @@ public class VectorizedColumnReader { * is guaranteed that num is smaller than the number of values left in the current page. */ - private void readBooleanBatch(int rowId, int num, WritableColumnVector column) - throws IOException { + private void readBooleanBatch(int rowId, int num, WritableColumnVector column) { assert(column.dataType() == DataTypes.BooleanType); defColumn.readBooleans( num, column, rowId, maxDefLevel, (VectorizedValuesReader) dataColumn); } - private void readIntBatch(int rowId, int num, WritableColumnVector column) throws IOException { + private void readIntBatch(int rowId, int num, WritableColumnVector column) { // This is where we implement support for the valid type conversions. // TODO: implement remaining type conversions if (column.dataType() == DataTypes.IntegerType || column.dataType() == DataTypes.DateType || @@ -375,7 +374,7 @@ public class VectorizedColumnReader { } } - private void readLongBatch(int rowId, int num, WritableColumnVector column) throws IOException { + private void readLongBatch(int rowId, int num, WritableColumnVector column) { // This is where we implement support for the valid type conversions. if (column.dataType() == DataTypes.LongType || DecimalType.is64BitDecimalType(column.dataType())) { @@ -394,7 +393,7 @@ public class VectorizedColumnReader { } } - private void readFloatBatch(int rowId, int num, WritableColumnVector column) throws IOException { + private void readFloatBatch(int rowId, int num, WritableColumnVector column) { // This is where we implement support for the valid type conversions. // TODO: support implicit cast to double? if (column.dataType() == DataTypes.FloatType) { @@ -405,7 +404,7 @@ public class VectorizedColumnReader { } } - private void readDoubleBatch(int rowId, int num, WritableColumnVector column) throws IOException { + private void readDoubleBatch(int rowId, int num, WritableColumnVector column) { // This is where we implement support for the valid type conversions. // TODO: implement remaining type conversions if (column.dataType() == DataTypes.DoubleType) { @@ -416,7 +415,7 @@ public class VectorizedColumnReader { } } - private void readBinaryBatch(int rowId, int num, WritableColumnVector column) throws IOException { + private void readBinaryBatch(int rowId, int num, WritableColumnVector column) { // This is where we implement support for the valid type conversions. 
http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
----------------------------------------------------------------------
diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
index b83fe1b..718a368 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/SparkLauncher.java
@@ -605,7 +605,7 @@ public class SparkLauncher {
   }
 
   // Visible for testing.
-  String findSparkSubmit() throws IOException {
+  String findSparkSubmit() {
     String script = isWindows() ? "spark-submit.cmd" : "spark-submit";
     return join(File.separator, builder.getSparkHome(), "bin", script);
   }

http://git-wip-us.apache.org/repos/asf/spark/blob/b6ef1f57/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java
----------------------------------------------------------------------
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java
index 2173bbc..3c8d766 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java
@@ -350,14 +350,13 @@ public class VectorizedColumnReader {
    * is guaranteed that num is smaller than the number of values left in the current page.
    */
-  private void readBooleanBatch(int rowId, int num, WritableColumnVector column)
-      throws IOException {
+  private void readBooleanBatch(int rowId, int num, WritableColumnVector column) {
     assert(column.dataType() == DataTypes.BooleanType);
     defColumn.readBooleans(
         num, column, rowId, maxDefLevel, (VectorizedValuesReader) dataColumn);
   }
 
-  private void readIntBatch(int rowId, int num, WritableColumnVector column) throws IOException {
+  private void readIntBatch(int rowId, int num, WritableColumnVector column) {
     // This is where we implement support for the valid type conversions.
     // TODO: implement remaining type conversions
     if (column.dataType() == DataTypes.IntegerType || column.dataType() == DataTypes.DateType ||
@@ -375,7 +374,7 @@ public class VectorizedColumnReader {
     }
   }
 
-  private void readLongBatch(int rowId, int num, WritableColumnVector column) throws IOException {
+  private void readLongBatch(int rowId, int num, WritableColumnVector column) {
     // This is where we implement support for the valid type conversions.
     if (column.dataType() == DataTypes.LongType ||
         DecimalType.is64BitDecimalType(column.dataType())) {
@@ -394,7 +393,7 @@ public class VectorizedColumnReader {
     }
   }
 
-  private void readFloatBatch(int rowId, int num, WritableColumnVector column) throws IOException {
+  private void readFloatBatch(int rowId, int num, WritableColumnVector column) {
     // This is where we implement support for the valid type conversions.
     // TODO: support implicit cast to double?
     if (column.dataType() == DataTypes.FloatType) {
@@ -405,7 +404,7 @@ public class VectorizedColumnReader {
     }
   }
 
-  private void readDoubleBatch(int rowId, int num, WritableColumnVector column) throws IOException {
+  private void readDoubleBatch(int rowId, int num, WritableColumnVector column) {
     // This is where we implement support for the valid type conversions.
     // TODO: implement remaining type conversions
    if (column.dataType() == DataTypes.DoubleType) {
@@ -416,7 +415,7 @@ public class VectorizedColumnReader {
     }
   }
 
-  private void readBinaryBatch(int rowId, int num, WritableColumnVector column) throws IOException {
+  private void readBinaryBatch(int rowId, int num, WritableColumnVector column) {
     // This is where we implement support for the valid type conversions.
     // TODO: implement remaining type conversions
     VectorizedValuesReader data = (VectorizedValuesReader) dataColumn;
@@ -441,7 +440,7 @@ public class VectorizedColumnReader {
       int rowId,
       int num,
       WritableColumnVector column,
-      int arrayLen) throws IOException {
+      int arrayLen) {
     VectorizedValuesReader data = (VectorizedValuesReader) dataColumn;
     // This is where we implement support for the valid type conversions.
     // TODO: implement remaining type conversions
@@ -476,7 +475,7 @@ public class VectorizedColumnReader {
     }
   }
 
-  private void readPage() throws IOException {
+  private void readPage() {
     DataPage page = pageReader.readPage();
     // TODO: Why is this a visitor?
     page.accept(new DataPage.Visitor<Void>() {