This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 94de3ca2942 [SPARK-39111][CORE][SQL] Mark overridden methods with `@Override` annotation
94de3ca2942 is described below

commit 94de3ca2942bb04852510abccf06df1fa8b2dab3
Author: Qian.Sun <qian.sun2...@gmail.com>
AuthorDate: Fri May 6 18:14:27 2022 -0500

    [SPARK-39111][CORE][SQL] Mark overridden methods with `@Override` annotation
    
    ### What changes were proposed in this pull request?
    
    This PR marks overridden methods with the `@Override` annotation.
    
    ### Why are the changes needed?
    
    A method that overrides one declared in an interface or abstract class should be
    marked with the `@Override` annotation. The annotation lets the compiler verify
    that the override actually takes effect, and if the method signature in the
    interface or abstract class later changes, the implementing class immediately
    fails to compile instead of silently declaring an unrelated method.
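
    For illustration, a minimal sketch of the failure mode the annotation guards
    against (the `Codec` classes below are hypothetical, invented for this example,
    not part of the patch):

    ```java
    // Hypothetical types, for illustration only.
    abstract class Codec {
      abstract void encode(java.nio.ByteBuffer buffer);
    }

    class MyCodec extends Codec {
      // If Codec#encode is later renamed or its parameters change, @Override
      // turns this into a compile-time error instead of silently leaving an
      // unrelated method that never overrides anything.
      @Override
      void encode(java.nio.ByteBuffer buffer) {
        buffer.putLong(42L);
      }
    }
    ```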
    
    ### Does this PR introduce _any_ user-facing change?
    
    No.
    
    ### How was this patch tested?
    
    Pass the existing unit tests.
    
    Closes #36461 from dcoliversun/SPARK-39111.
    
    Authored-by: Qian.Sun <qian.sun2...@gmail.com>
    Signed-off-by: Sean Owen <sro...@gmail.com>
---
 .../src/main/java/org/apache/spark/network/TransportContext.java  | 1 +
 .../java/org/apache/spark/network/crypto/AuthServerBootstrap.java | 1 +
 .../java/org/apache/spark/network/protocol/StreamChunkId.java     | 1 +
 .../java/org/apache/spark/network/sasl/SaslServerBootstrap.java   | 1 +
 .../src/main/java/org/apache/spark/network/util/NettyLogger.java  | 1 +
 .../spark/network/shuffle/ExternalShuffleBlockResolver.java       | 1 +
 .../org/apache/spark/network/shuffle/RemoteBlockPushResolver.java | 1 +
 .../src/main/java/org/apache/spark/unsafe/types/UTF8String.java   | 2 ++
 .../java/org/apache/spark/sql/avro/SparkAvroKeyOutputFormat.java  | 4 ++++
 core/src/main/java/org/apache/spark/SparkFirehoseListener.java    | 2 ++
 .../main/java/org/apache/spark/memory/SparkOutOfMemoryError.java  | 1 +
 .../spark/util/collection/unsafe/sort/PrefixComparators.java      | 8 ++++++++
 .../spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java   | 1 +
 .../src/main/java/org/apache/spark/examples/JavaLogQuery.java     | 1 +
 .../spark/examples/sql/JavaUserDefinedTypedAggregation.java       | 6 ++++++
 .../spark/examples/sql/JavaUserDefinedUntypedAggregation.java     | 6 ++++++
 .../src/main/java/org/apache/spark/launcher/AbstractLauncher.java | 1 +
 .../spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java     | 2 ++
 .../apache/spark/sql/catalyst/expressions/UnsafeArrayData.java    | 1 +
 .../spark/sql/connector/catalog/DelegatingCatalogExtension.java   | 1 +
 .../java/org/apache/spark/sql/connector/catalog/TableChange.java  | 1 +
 .../spark/sql/connector/expressions/GeneralScalarExpression.java  | 1 +
 .../spark/sql/connector/expressions/filter/AlwaysFalse.java       | 3 +++
 .../apache/spark/sql/connector/expressions/filter/AlwaysTrue.java | 3 +++
 .../src/main/java/org/apache/spark/sql/util/NumericHistogram.java | 1 +
 .../datasources/parquet/VectorizedDeltaByteArrayReader.java       | 1 +
 .../spark/sql/execution/vectorized/OffHeapColumnVector.java       | 1 +
 .../apache/spark/sql/execution/vectorized/OnHeapColumnVector.java | 1 +
 28 files changed, 55 insertions(+)

diff --git a/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java b/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
index 6948e595b54..b885bee7032 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/TransportContext.java
@@ -243,6 +243,7 @@ public class TransportContext implements Closeable {
     return registeredConnections;
   }
 
+  @Override
   public void close() {
     if (chunkFetchWorkers != null) {
       chunkFetchWorkers.shutdownGracefully();
diff --git a/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthServerBootstrap.java b/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthServerBootstrap.java
index 77a2a6af4d1..f4c98fad292 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthServerBootstrap.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/crypto/AuthServerBootstrap.java
@@ -43,6 +43,7 @@ public class AuthServerBootstrap implements TransportServerBootstrap {
     this.secretKeyHolder = secretKeyHolder;
   }
 
+  @Override
   public RpcHandler doBootstrap(Channel channel, RpcHandler rpcHandler) {
     if (!conf.encryptionEnabled()) {
       TransportServerBootstrap sasl = new SaslServerBootstrap(conf, secretKeyHolder);
diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamChunkId.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamChunkId.java
index 75c6d630b9c..29201d135ba 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamChunkId.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamChunkId.java
@@ -40,6 +40,7 @@ public final class StreamChunkId implements Encodable {
     return 8 + 4;
   }
 
+  @Override
   public void encode(ByteBuf buffer) {
     buffer.writeLong(streamId);
     buffer.writeInt(chunkIndex);
diff --git a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslServerBootstrap.java b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslServerBootstrap.java
index f2f983856f4..812f9b7c75c 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslServerBootstrap.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslServerBootstrap.java
@@ -42,6 +42,7 @@ public class SaslServerBootstrap implements TransportServerBootstrap {
   * Wrap the given application handler in a SaslRpcHandler that will handle the initial SASL
    * negotiation.
    */
+  @Override
   public RpcHandler doBootstrap(Channel channel, RpcHandler rpcHandler) {
     return new SaslRpcHandler(conf, channel, rpcHandler, secretKeyHolder);
   }
diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java b/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java
index 914c9704c79..9398726a926 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java
@@ -35,6 +35,7 @@ public class NettyLogger {
       super(clazz, level);
     }
 
+    @Override
     protected String format(ChannelHandlerContext ctx, String eventName, Object arg) {
       if (arg instanceof ByteBuf) {
         return format(ctx, eventName) + " " + ((ByteBuf) arg).readableBytes() + "B";
diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
index 4b8a5e82d74..c3b7db15d21 100644
--- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
+++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalShuffleBlockResolver.java
@@ -114,6 +114,7 @@ public class ExternalShuffleBlockResolver {
     String indexCacheSize = conf.get("spark.shuffle.service.index.cache.size", "100m");
     CacheLoader<String, ShuffleIndexInformation> indexCacheLoader =
         new CacheLoader<String, ShuffleIndexInformation>() {
+          @Override
           public ShuffleIndexInformation load(String filePath) throws IOException {
             return new ShuffleIndexInformation(filePath);
           }
diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java
index 626a7252943..f4de75e3fc6 100644
--- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java
+++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/RemoteBlockPushResolver.java
@@ -115,6 +115,7 @@ public class RemoteBlockPushResolver implements MergedShuffleFileManager {
     this.ioExceptionsThresholdDuringMerge = conf.ioExceptionsThresholdDuringMerge();
     CacheLoader<String, ShuffleIndexInformation> indexCacheLoader =
       new CacheLoader<String, ShuffleIndexInformation>() {
+        @Override
         public ShuffleIndexInformation load(String filePath) throws IOException {
           return new ShuffleIndexInformation(filePath);
         }
diff --git a/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java b/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
index bf11814c981..a4f84584ae6 100644
--- a/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
+++ b/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
@@ -1511,12 +1511,14 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
     return UTF8String.fromBytes(sx);
   }
 
+  @Override
   public void writeExternal(ObjectOutput out) throws IOException {
     byte[] bytes = getBytes();
     out.writeInt(bytes.length);
     out.write(bytes);
   }
 
+  @Override
   public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
     offset = BYTE_ARRAY_OFFSET;
     numBytes = in.readInt();
diff --git a/connector/avro/src/main/java/org/apache/spark/sql/avro/SparkAvroKeyOutputFormat.java b/connector/avro/src/main/java/org/apache/spark/sql/avro/SparkAvroKeyOutputFormat.java
index a4555844b51..df5d6d73f2f 100644
--- a/connector/avro/src/main/java/org/apache/spark/sql/avro/SparkAvroKeyOutputFormat.java
+++ b/connector/avro/src/main/java/org/apache/spark/sql/avro/SparkAvroKeyOutputFormat.java
@@ -46,6 +46,7 @@ class SparkAvroKeyOutputFormat extends AvroKeyOutputFormat<GenericRecord> {
       this.metadata = metadata;
     }
 
+    @Override
     protected RecordWriter<AvroKey<GenericRecord>, NullWritable> create(
         Schema writerSchema,
         GenericData dataModel,
@@ -80,14 +81,17 @@ class SparkAvroKeyRecordWriter<T> extends RecordWriter<AvroKey<T>, NullWritable>
     this.mAvroFileWriter.create(writerSchema, outputStream);
   }
 
+  @Override
   public void write(AvroKey<T> record, NullWritable ignore) throws IOException {
     this.mAvroFileWriter.append(record.datum());
   }
 
+  @Override
   public void close(TaskAttemptContext context) throws IOException {
     this.mAvroFileWriter.close();
   }
 
+  @Override
   public long sync() throws IOException {
     return this.mAvroFileWriter.sync();
   }
diff --git a/core/src/main/java/org/apache/spark/SparkFirehoseListener.java b/core/src/main/java/org/apache/spark/SparkFirehoseListener.java
index 7cb2455affe..2602acf59ff 100644
--- a/core/src/main/java/org/apache/spark/SparkFirehoseListener.java
+++ b/core/src/main/java/org/apache/spark/SparkFirehoseListener.java
@@ -200,11 +200,13 @@ public class SparkFirehoseListener implements SparkListenerInterface {
     onEvent(speculativeTask);
   }
 
+  @Override
   public void onUnschedulableTaskSetAdded(
       SparkListenerUnschedulableTaskSetAdded unschedulableTaskSetAdded) {
     onEvent(unschedulableTaskSetAdded);
   }
 
+  @Override
   public void onUnschedulableTaskSetRemoved(
       SparkListenerUnschedulableTaskSetRemoved unschedulableTaskSetRemoved) {
     onEvent(unschedulableTaskSetRemoved);
diff --git a/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java b/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java
index 88eada34e3c..22dfe4d4dbe 100644
--- a/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java
+++ b/core/src/main/java/org/apache/spark/memory/SparkOutOfMemoryError.java
@@ -44,6 +44,7 @@ public final class SparkOutOfMemoryError extends OutOfMemoryError implements Spa
         this.messageParameters = messageParameters;
     }
 
+    @Override
     public String getErrorClass() {
         return errorClass;
     }
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
index 54abaf93a74..e7b128397e1 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/PrefixComparators.java
@@ -104,6 +104,7 @@ public class PrefixComparators {
     @Override public boolean sortDescending() { return false; }
     @Override public boolean sortSigned() { return false; }
     @Override public boolean nullsFirst() { return true; }
+    @Override
     public int compare(long aPrefix, long bPrefix) {
       return UnsignedLongs.compare(aPrefix, bPrefix);
     }
@@ -113,6 +114,7 @@ public class PrefixComparators {
     @Override public boolean sortDescending() { return false; }
     @Override public boolean sortSigned() { return false; }
     @Override public boolean nullsFirst() { return false; }
+    @Override
     public int compare(long aPrefix, long bPrefix) {
       return UnsignedLongs.compare(aPrefix, bPrefix);
     }
@@ -122,6 +124,7 @@ public class PrefixComparators {
     @Override public boolean sortDescending() { return true; }
     @Override public boolean sortSigned() { return false; }
     @Override public boolean nullsFirst() { return true; }
+    @Override
     public int compare(long bPrefix, long aPrefix) {
       return UnsignedLongs.compare(aPrefix, bPrefix);
     }
@@ -131,6 +134,7 @@ public class PrefixComparators {
     @Override public boolean sortDescending() { return true; }
     @Override public boolean sortSigned() { return false; }
     @Override public boolean nullsFirst() { return false; }
+    @Override
     public int compare(long bPrefix, long aPrefix) {
       return UnsignedLongs.compare(aPrefix, bPrefix);
     }
@@ -140,6 +144,7 @@ public class PrefixComparators {
     @Override public boolean sortDescending() { return false; }
     @Override public boolean sortSigned() { return true; }
     @Override public boolean nullsFirst() { return true; }
+    @Override
     public int compare(long a, long b) {
       return Long.compare(a, b);
     }
@@ -149,6 +154,7 @@ public class PrefixComparators {
     @Override public boolean sortDescending() { return false; }
     @Override public boolean sortSigned() { return true; }
     @Override public boolean nullsFirst() { return false; }
+    @Override
     public int compare(long a, long b) {
       return Long.compare(a, b);
     }
@@ -158,6 +164,7 @@ public class PrefixComparators {
     @Override public boolean sortDescending() { return true; }
     @Override public boolean sortSigned() { return true; }
     @Override public boolean nullsFirst() { return true; }
+    @Override
     public int compare(long b, long a) {
       return Long.compare(a, b);
     }
@@ -167,6 +174,7 @@ public class PrefixComparators {
     @Override public boolean sortDescending() { return true; }
     @Override public boolean sortSigned() { return true; }
     @Override public boolean nullsFirst() { return false; }
+    @Override
     public int compare(long b, long a) {
       return Long.compare(a, b);
     }
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
index 765ee035855..4de5440cc15 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
@@ -277,6 +277,7 @@ public final class UnsafeInMemorySorter {
       this.offset = offset;
     }
 
+    @Override
     public SortedIterator clone() {
       SortedIterator iter = new SortedIterator(numRecords, offset);
       iter.position = position;
diff --git a/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java b/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
index cf12de390f6..259843f2bfd 100644
--- a/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
+++ b/examples/src/main/java/org/apache/spark/examples/JavaLogQuery.java
@@ -69,6 +69,7 @@ public final class JavaLogQuery {
       return new Stats(count + other.count, numBytes + other.numBytes);
     }
 
+    @Override
     public String toString() {
       return String.format("bytes=%s\tn=%s", numBytes, count);
     }
diff --git a/examples/src/main/java/org/apache/spark/examples/sql/JavaUserDefinedTypedAggregation.java b/examples/src/main/java/org/apache/spark/examples/sql/JavaUserDefinedTypedAggregation.java
index 78e9011be47..8926210f024 100644
--- a/examples/src/main/java/org/apache/spark/examples/sql/JavaUserDefinedTypedAggregation.java
+++ b/examples/src/main/java/org/apache/spark/examples/sql/JavaUserDefinedTypedAggregation.java
@@ -88,11 +88,13 @@ public class JavaUserDefinedTypedAggregation {
 
   public static class MyAverage extends Aggregator<Employee, Average, Double> {
     // A zero value for this aggregation. Should satisfy the property that any b + zero = b
+    @Override
     public Average zero() {
       return new Average(0L, 0L);
     }
     // Combine two values to produce a new value. For performance, the function may modify `buffer`
     // and return it instead of constructing a new object
+    @Override
     public Average reduce(Average buffer, Employee employee) {
       long newSum = buffer.getSum() + employee.getSalary();
       long newCount = buffer.getCount() + 1;
@@ -101,6 +103,7 @@ public class JavaUserDefinedTypedAggregation {
       return buffer;
     }
     // Merge two intermediate values
+    @Override
     public Average merge(Average b1, Average b2) {
       long mergedSum = b1.getSum() + b2.getSum();
       long mergedCount = b1.getCount() + b2.getCount();
@@ -109,14 +112,17 @@ public class JavaUserDefinedTypedAggregation {
       return b1;
     }
     // Transform the output of the reduction
+    @Override
     public Double finish(Average reduction) {
       return ((double) reduction.getSum()) / reduction.getCount();
     }
     // Specifies the Encoder for the intermediate value type
+    @Override
     public Encoder<Average> bufferEncoder() {
       return Encoders.bean(Average.class);
     }
     // Specifies the Encoder for the final output value type
+    @Override
     public Encoder<Double> outputEncoder() {
       return Encoders.DOUBLE();
     }
diff --git a/examples/src/main/java/org/apache/spark/examples/sql/JavaUserDefinedUntypedAggregation.java b/examples/src/main/java/org/apache/spark/examples/sql/JavaUserDefinedUntypedAggregation.java
index d300018845a..6bfec797ff7 100644
--- a/examples/src/main/java/org/apache/spark/examples/sql/JavaUserDefinedUntypedAggregation.java
+++ b/examples/src/main/java/org/apache/spark/examples/sql/JavaUserDefinedUntypedAggregation.java
@@ -65,11 +65,13 @@ public class JavaUserDefinedUntypedAggregation {
 
   public static class MyAverage extends Aggregator<Long, Average, Double> {
     // A zero value for this aggregation. Should satisfy the property that any b + zero = b
+    @Override
     public Average zero() {
       return new Average(0L, 0L);
     }
     // Combine two values to produce a new value. For performance, the function may modify `buffer`
     // and return it instead of constructing a new object
+    @Override
     public Average reduce(Average buffer, Long data) {
       long newSum = buffer.getSum() + data;
       long newCount = buffer.getCount() + 1;
@@ -78,6 +80,7 @@ public class JavaUserDefinedUntypedAggregation {
       return buffer;
     }
     // Merge two intermediate values
+    @Override
     public Average merge(Average b1, Average b2) {
       long mergedSum = b1.getSum() + b2.getSum();
       long mergedCount = b1.getCount() + b2.getCount();
@@ -86,14 +89,17 @@ public class JavaUserDefinedUntypedAggregation {
       return b1;
     }
     // Transform the output of the reduction
+    @Override
     public Double finish(Average reduction) {
       return ((double) reduction.getSum()) / reduction.getCount();
     }
     // Specifies the Encoder for the intermediate value type
+    @Override
     public Encoder<Average> bufferEncoder() {
       return Encoders.bean(Average.class);
     }
     // Specifies the Encoder for the final output value type
+    @Override
     public Encoder<Double> outputEncoder() {
       return Encoders.DOUBLE();
     }
diff --git a/launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java b/launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java
index 8a1256f7341..eee15419209 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/AbstractLauncher.java
@@ -298,6 +298,7 @@ public abstract class AbstractLauncher<T extends AbstractLauncher<T>> {
       return true;
     }
 
+    @Override
     protected void handleExtraArgs(List<String> extra) {
       // No op.
     }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java
index 40d360d84fb..58b28ed4a35 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/RowBasedKeyValueBatch.java
@@ -115,6 +115,7 @@ public abstract class RowBasedKeyValueBatch extends MemoryConsumer implements Cl
 
   public final int numRows() { return numRows; }
 
+  @Override
   public final void close() {
     if (page != null) {
       freePage(page);
@@ -169,6 +170,7 @@ public abstract class RowBasedKeyValueBatch extends MemoryConsumer implements Cl
   * space for new consumers. For RowBasedKeyValueBatch, we do not actually spill and return 0.
   * We should not throw OutOfMemory exception here because other associated consumers might spill
    */
+  @Override
   public final long spill(long size, MemoryConsumer trigger) throws IOException {
     logger.warn("Calling spill() on RowBasedKeyValueBatch. Will not spill but return 0.");
     return 0;
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java
index ca75b4c0c21..6bea714e7d5 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java
@@ -98,6 +98,7 @@ public final class UnsafeArrayData extends ArrayData implements Externalizable,
     assert ordinal < numElements : "ordinal (" + ordinal + ") should < " + numElements;
   }
 
+  @Override
   public Object[] array() {
     throw new UnsupportedOperationException("Not supported on UnsafeArrayData.");
   }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/DelegatingCatalogExtension.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/DelegatingCatalogExtension.java
index 865ac553199..8bbfe535295 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/DelegatingCatalogExtension.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/DelegatingCatalogExtension.java
@@ -39,6 +39,7 @@ public abstract class DelegatingCatalogExtension implements CatalogExtension {
 
   private CatalogPlugin delegate;
 
+  @Override
   public final void setDelegateCatalog(CatalogPlugin delegate) {
     this.delegate = delegate;
   }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableChange.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableChange.java
index c63d2d45861..72dbf6ca07a 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableChange.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/TableChange.java
@@ -536,6 +536,7 @@ public interface TableChange {
       this.nullable = nullable;
     }
 
+    @Override
     public String[] fieldNames() {
       return fieldNames;
     }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/GeneralScalarExpression.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/GeneralScalarExpression.java
index 58082d5ee09..15071c960a2 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/GeneralScalarExpression.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/GeneralScalarExpression.java
@@ -165,6 +165,7 @@ public class GeneralScalarExpression implements Expression, Serializable {
   }
 
   public String name() { return name; }
+  @Override
   public Expression[] children() { return children; }
 
   @Override
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/filter/AlwaysFalse.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/filter/AlwaysFalse.java
index accdd1acd7d..5eced96ea4b 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/filter/AlwaysFalse.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/filter/AlwaysFalse.java
@@ -34,13 +34,16 @@ public final class AlwaysFalse extends Predicate implements Literal<Boolean> {
     super("ALWAYS_FALSE", new Predicate[]{});
   }
 
+  @Override
   public Boolean value() {
     return false;
   }
 
+  @Override
   public DataType dataType() {
     return DataTypes.BooleanType;
   }
 
+  @Override
   public String toString() { return "FALSE"; }
 }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/filter/AlwaysTrue.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/filter/AlwaysTrue.java
index 5a14f64b9b7..483a580136c 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/filter/AlwaysTrue.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/expressions/filter/AlwaysTrue.java
@@ -34,13 +34,16 @@ public final class AlwaysTrue extends Predicate implements Literal<Boolean> {
     super("ALWAYS_TRUE", new Predicate[]{});
   }
 
+  @Override
   public Boolean value() {
     return true;
   }
 
+  @Override
   public DataType dataType() {
     return DataTypes.BooleanType;
   }
 
+  @Override
   public String toString() { return "TRUE"; }
 }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/util/NumericHistogram.java b/sql/catalyst/src/main/java/org/apache/spark/sql/util/NumericHistogram.java
index 987c18e4129..947c2db29de 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/util/NumericHistogram.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/util/NumericHistogram.java
@@ -53,6 +53,7 @@ public class NumericHistogram {
     public double x;
     public double y;
 
+    @Override
     public int compareTo(Object other) {
       return Double.compare(x, ((Coord) other).x);
     }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedDeltaByteArrayReader.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedDeltaByteArrayReader.java
index b3fc54a8d15..198d57267fc 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedDeltaByteArrayReader.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedDeltaByteArrayReader.java
@@ -107,6 +107,7 @@ public class VectorizedDeltaByteArrayReader extends VectorizedReaderBase
   * read corrupted files written with this bug, when reading a new page we need to recover the
    * previous page's last value to use it (if needed) to read the first value.
    */
+  @Override
   public void setPreviousReader(ValuesReader reader) {
     if (reader != null) {
       this.previous = ((VectorizedDeltaByteArrayReader) reader).previous;
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OffHeapColumnVector.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OffHeapColumnVector.java
index 711c00856e9..5674a091f6d 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OffHeapColumnVector.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OffHeapColumnVector.java
@@ -338,6 +338,7 @@ public final class OffHeapColumnVector extends WritableColumnVector {
    * This should only be called when the ColumnVector is dictionaryIds.
    * We have this separate method for dictionaryIds as per SPARK-16928.
    */
+  @Override
   public int getDictId(int rowId) {
     assert(dictionary == null)
             : "A ColumnVector dictionary should not have a dictionary for 
itself.";
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OnHeapColumnVector.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OnHeapColumnVector.java
index 505377bdb68..6e4a9c643e8 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OnHeapColumnVector.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/OnHeapColumnVector.java
@@ -330,6 +330,7 @@ public final class OnHeapColumnVector extends WritableColumnVector {
    * This should only be called when the ColumnVector is dictionaryIds.
    * We have this separate method for dictionaryIds as per SPARK-16928.
    */
+  @Override
   public int getDictId(int rowId) {
     assert(dictionary == null)
             : "A ColumnVector dictionary should not have a dictionary for 
itself.";

