This is an automated email from the ASF dual-hosted git repository.

yangjie01 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b0576fff9b72 [SPARK-45501][CORE][SQL] Use pattern matching for type 
checking and conversion
b0576fff9b72 is described below

commit b0576fff9b72880cd81a9d22c044dec329bc67d0
Author: yangjie01 <yangji...@baidu.com>
AuthorDate: Thu Oct 12 23:53:06 2023 +0800

    [SPARK-45501][CORE][SQL] Use pattern matching for type checking and 
conversion
    
    ### What changes were proposed in this pull request?
    This pr changes Java code to use pattern matching for type checking and conversion 
instead of explicit type casting statements.
    
    The change refers to [JEP 394](https://openjdk.org/jeps/394), and this pr 
does not include parts of the `hive-thriftserver` module.
    
    Example:
    
    ```java
      if (obj instanceof String) {
        String str = (String) obj;
        System.out.println(str);
      }
    ```
    Can be replaced with
    ```java
      if (obj instanceof String str) {
        System.out.println(str);
      }
    ```
    
    ### Why are the changes needed?
    Using `JEP 394: Pattern Matching for instanceof` can bring the following 
benefits:
    
    1. **Code conciseness**: By eliminating explicit type conversion and 
redundant variable declarations, the code becomes more concise and easy to read.
    2. **Improved safety**: In the past, explicit type conversion was required, 
and if accidentally converted to the wrong type, a `ClassCastException` would 
be thrown at runtime. Now, as type checking and type conversion occur in the 
same step, such errors are no longer possible.
    3. **Better semantics**: Previously, `instanceof` and type casting were two 
independent steps, which could lead to unclear code intentions. Now, these two 
steps are merged into one, making the intentions of the code clearer.
    
    ### Does this PR introduce _any_ user-facing change?
    No
    
    ### How was this patch tested?
    Pass GitHub Actions
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No
    
    Closes #43327 from LuciferYang/jep-394.
    
    Authored-by: yangjie01 <yangji...@baidu.com>
    Signed-off-by: yangjie01 <yangji...@baidu.com>
---
 .../spark/util/kvstore/ArrayKeyIndexType.java      |  3 +--
 .../org/apache/spark/util/kvstore/CustomType1.java |  3 +--
 .../org/apache/spark/util/kvstore/CustomType2.java |  3 +--
 .../org/apache/spark/util/kvstore/IntKeyType.java  |  3 +--
 .../spark/network/protocol/ChunkFetchFailure.java  |  3 +--
 .../spark/network/protocol/ChunkFetchRequest.java  |  3 +--
 .../spark/network/protocol/ChunkFetchSuccess.java  |  3 +--
 .../network/protocol/MergedBlockMetaRequest.java   |  3 +--
 .../spark/network/protocol/MessageEncoder.java     |  3 +--
 .../spark/network/protocol/OneWayMessage.java      |  3 +--
 .../apache/spark/network/protocol/RpcFailure.java  |  3 +--
 .../apache/spark/network/protocol/RpcRequest.java  |  3 +--
 .../apache/spark/network/protocol/RpcResponse.java |  3 +--
 .../spark/network/protocol/StreamChunkId.java      |  3 +--
 .../spark/network/protocol/StreamFailure.java      |  3 +--
 .../spark/network/protocol/StreamRequest.java      |  3 +--
 .../spark/network/protocol/StreamResponse.java     |  3 +--
 .../spark/network/protocol/UploadStream.java       |  3 +--
 .../apache/spark/network/sasl/SparkSaslClient.java |  9 +++-----
 .../apache/spark/network/sasl/SparkSaslServer.java | 12 ++++------
 .../network/server/TransportChannelHandler.java    |  3 +--
 .../network/shuffle/ExternalBlockHandler.java      |  3 +--
 .../network/shuffle/ShuffleTransportContext.java   |  6 ++---
 .../shuffle/protocol/BlockPushReturnCode.java      |  3 +--
 .../network/shuffle/protocol/BlocksRemoved.java    |  3 +--
 .../shuffle/protocol/ExecutorShuffleInfo.java      |  3 +--
 .../shuffle/protocol/FinalizeShuffleMerge.java     |  3 +--
 .../shuffle/protocol/GetLocalDirsForExecutors.java |  3 +--
 .../shuffle/protocol/LocalDirsForExecutors.java    |  3 +--
 .../network/shuffle/protocol/MergeStatuses.java    |  3 +--
 .../spark/network/shuffle/protocol/OpenBlocks.java |  3 +--
 .../network/shuffle/protocol/PushBlockStream.java  |  3 +--
 .../network/shuffle/protocol/RegisterExecutor.java |  3 +--
 .../network/shuffle/protocol/RemoveBlocks.java     |  3 +--
 .../shuffle/protocol/RemoveShuffleMerge.java       |  3 +--
 .../network/shuffle/protocol/StreamHandle.java     |  3 +--
 .../network/shuffle/protocol/UploadBlock.java      |  3 +--
 .../shuffle/protocol/UploadBlockStream.java        |  3 +--
 .../network/yarn/YarnShuffleServiceMetrics.java    |  9 +++-----
 .../org/apache/spark/util/sketch/BitArray.java     |  3 +--
 .../apache/spark/util/sketch/BloomFilterImpl.java  | 11 +++------
 .../spark/util/sketch/CountMinSketchImpl.java      |  8 ++-----
 .../org/apache/spark/unsafe/types/UTF8String.java  |  6 ++---
 .../java/org/apache/spark/api/java/Optional.java   |  3 +--
 .../org/apache/spark/launcher/LauncherServer.java  |  6 ++---
 .../sql/connector/catalog/IdentifierImpl.java      |  3 +--
 .../expressions/SpecializedGettersReader.java      |  3 +--
 .../sql/catalyst/expressions/UnsafeArrayData.java  |  3 +--
 .../spark/sql/catalyst/expressions/UnsafeRow.java  |  3 +--
 .../sql/connector/catalog/ColumnDefaultValue.java  |  3 +--
 .../sql/connector/util/V2ExpressionSQLBuilder.java | 27 ++++++++--------------
 .../spark/sql/vectorized/ArrowColumnVector.java    |  9 +++-----
 .../spark/sql/vectorized/ColumnarBatchRow.java     |  6 ++---
 .../apache/spark/sql/vectorized/ColumnarRow.java   |  6 ++---
 .../datasources/orc/OrcColumnVectorUtils.java      |  9 +++-----
 .../parquet/ParquetVectorUpdaterFactory.java       |  6 ++---
 .../parquet/VectorizedParquetRecordReader.java     |  3 +--
 .../execution/vectorized/ColumnVectorUtils.java    | 12 ++++------
 .../execution/vectorized/MutableColumnarRow.java   |  9 +++-----
 .../execution/vectorized/WritableColumnVector.java | 21 ++++++-----------
 .../spark/sql/JavaBeanDeserializationSuite.java    | 12 ++++------
 .../org/apache/spark/sql/JavaDatasetSuite.java     |  3 +--
 .../sql/connector/JavaAdvancedDataSourceV2.java    |  9 +++-----
 63 files changed, 107 insertions(+), 218 deletions(-)

diff --git 
a/common/kvstore/src/test/java/org/apache/spark/util/kvstore/ArrayKeyIndexType.java
 
b/common/kvstore/src/test/java/org/apache/spark/util/kvstore/ArrayKeyIndexType.java
index dd53fdf0b1b4..5e57e7a07c60 100644
--- 
a/common/kvstore/src/test/java/org/apache/spark/util/kvstore/ArrayKeyIndexType.java
+++ 
b/common/kvstore/src/test/java/org/apache/spark/util/kvstore/ArrayKeyIndexType.java
@@ -29,8 +29,7 @@ public class ArrayKeyIndexType {
 
   @Override
   public boolean equals(Object o) {
-    if (o instanceof ArrayKeyIndexType) {
-      ArrayKeyIndexType other = (ArrayKeyIndexType) o;
+    if (o instanceof ArrayKeyIndexType other) {
       return Arrays.equals(key, other.key) && Arrays.equals(id, other.id);
     }
     return false;
diff --git 
a/common/kvstore/src/test/java/org/apache/spark/util/kvstore/CustomType1.java 
b/common/kvstore/src/test/java/org/apache/spark/util/kvstore/CustomType1.java
index ebb5c2c5ed55..81b9044f7f09 100644
--- 
a/common/kvstore/src/test/java/org/apache/spark/util/kvstore/CustomType1.java
+++ 
b/common/kvstore/src/test/java/org/apache/spark/util/kvstore/CustomType1.java
@@ -36,8 +36,7 @@ public class CustomType1 {
 
   @Override
   public boolean equals(Object o) {
-    if (o instanceof CustomType1) {
-      CustomType1 other = (CustomType1) o;
+    if (o instanceof CustomType1 other) {
       return id.equals(other.id) && name.equals(other.name);
     }
     return false;
diff --git 
a/common/kvstore/src/test/java/org/apache/spark/util/kvstore/CustomType2.java 
b/common/kvstore/src/test/java/org/apache/spark/util/kvstore/CustomType2.java
index 3bb66bb3ec70..6378f2219a15 100644
--- 
a/common/kvstore/src/test/java/org/apache/spark/util/kvstore/CustomType2.java
+++ 
b/common/kvstore/src/test/java/org/apache/spark/util/kvstore/CustomType2.java
@@ -30,8 +30,7 @@ public class CustomType2 {
 
   @Override
   public boolean equals(Object o) {
-    if (o instanceof CustomType2) {
-      CustomType2 other = (CustomType2) o;
+    if (o instanceof CustomType2 other) {
       return id.equals(other.id) && parentId.equals(other.parentId);
     }
     return false;
diff --git 
a/common/kvstore/src/test/java/org/apache/spark/util/kvstore/IntKeyType.java 
b/common/kvstore/src/test/java/org/apache/spark/util/kvstore/IntKeyType.java
index f7051246f77b..8d3190e510a4 100644
--- a/common/kvstore/src/test/java/org/apache/spark/util/kvstore/IntKeyType.java
+++ b/common/kvstore/src/test/java/org/apache/spark/util/kvstore/IntKeyType.java
@@ -31,8 +31,7 @@ public class IntKeyType {
 
   @Override
   public boolean equals(Object o) {
-    if (o instanceof IntKeyType) {
-      IntKeyType other = (IntKeyType) o;
+    if (o instanceof IntKeyType other) {
       return key == other.key && id.equals(other.id) && 
values.equals(other.values);
     }
     return false;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchFailure.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchFailure.java
index 0f1781cbf1f2..cbad4c61b9b4 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchFailure.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchFailure.java
@@ -62,8 +62,7 @@ public final class ChunkFetchFailure extends AbstractMessage 
implements Response
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof ChunkFetchFailure) {
-      ChunkFetchFailure o = (ChunkFetchFailure) other;
+    if (other instanceof ChunkFetchFailure o) {
       return streamChunkId.equals(o.streamChunkId) && 
errorString.equals(o.errorString);
     }
     return false;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchRequest.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchRequest.java
index 7b034d5c2f59..2865388b3297 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchRequest.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchRequest.java
@@ -56,8 +56,7 @@ public final class ChunkFetchRequest extends AbstractMessage 
implements RequestM
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof ChunkFetchRequest) {
-      ChunkFetchRequest o = (ChunkFetchRequest) other;
+    if (other instanceof ChunkFetchRequest o) {
       return streamChunkId.equals(o.streamChunkId);
     }
     return false;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchSuccess.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchSuccess.java
index eaad143fc3f5..aa89b2062f62 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchSuccess.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/ChunkFetchSuccess.java
@@ -75,8 +75,7 @@ public final class ChunkFetchSuccess extends 
AbstractResponseMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof ChunkFetchSuccess) {
-      ChunkFetchSuccess o = (ChunkFetchSuccess) other;
+    if (other instanceof ChunkFetchSuccess o) {
       return streamChunkId.equals(o.streamChunkId) && super.equals(o);
     }
     return false;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/MergedBlockMetaRequest.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/MergedBlockMetaRequest.java
index c85d104fdd0f..3723730ebc06 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/MergedBlockMetaRequest.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/MergedBlockMetaRequest.java
@@ -84,8 +84,7 @@ public class MergedBlockMetaRequest extends AbstractMessage 
implements RequestMe
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof MergedBlockMetaRequest) {
-      MergedBlockMetaRequest o = (MergedBlockMetaRequest) other;
+    if (other instanceof MergedBlockMetaRequest o) {
       return requestId == o.requestId && shuffleId == o.shuffleId &&
         shuffleMergeId == o.shuffleMergeId && reduceId == o.reduceId &&
         Objects.equal(appId, o.appId);
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageEncoder.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageEncoder.java
index 06dc447309dd..00de47dc9fc2 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageEncoder.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageEncoder.java
@@ -59,8 +59,7 @@ public final class MessageEncoder extends 
MessageToMessageEncoder<Message> {
         isBodyInFrame = in.isBodyInFrame();
       } catch (Exception e) {
         in.body().release();
-        if (in instanceof AbstractResponseMessage) {
-          AbstractResponseMessage resp = (AbstractResponseMessage) in;
+        if (in instanceof AbstractResponseMessage resp) {
           // Re-encode this message as a failure response.
           String error = e.getMessage() != null ? e.getMessage() : "null";
           logger.error(String.format("Error processing %s for client %s",
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java
index 719f6c64c5de..91c818f3612a 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/OneWayMessage.java
@@ -66,8 +66,7 @@ public final class OneWayMessage extends AbstractMessage 
implements RequestMessa
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof OneWayMessage) {
-      OneWayMessage o = (OneWayMessage) other;
+    if (other instanceof OneWayMessage o) {
       return super.equals(o);
     }
     return false;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcFailure.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcFailure.java
index 6e4f5687d16c..02a45d68c650 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcFailure.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcFailure.java
@@ -60,8 +60,7 @@ public final class RpcFailure extends AbstractMessage 
implements ResponseMessage
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof RpcFailure) {
-      RpcFailure o = (RpcFailure) other;
+    if (other instanceof RpcFailure o) {
       return requestId == o.requestId && errorString.equals(o.errorString);
     }
     return false;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcRequest.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcRequest.java
index f2609ce2dbdb..a7dbe1283b31 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcRequest.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcRequest.java
@@ -72,8 +72,7 @@ public final class RpcRequest extends AbstractMessage 
implements RequestMessage
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof RpcRequest) {
-      RpcRequest o = (RpcRequest) other;
+    if (other instanceof RpcRequest o) {
       return requestId == o.requestId && super.equals(o);
     }
     return false;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcResponse.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcResponse.java
index 51b36ea18336..85709e36f83e 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcResponse.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/RpcResponse.java
@@ -72,8 +72,7 @@ public final class RpcResponse extends 
AbstractResponseMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof RpcResponse) {
-      RpcResponse o = (RpcResponse) other;
+    if (other instanceof RpcResponse o) {
       return requestId == o.requestId && super.equals(o);
     }
     return false;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamChunkId.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamChunkId.java
index 29201d135ba9..ae795ca4d147 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamChunkId.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamChunkId.java
@@ -60,8 +60,7 @@ public final class StreamChunkId implements Encodable {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof StreamChunkId) {
-      StreamChunkId o = (StreamChunkId) other;
+    if (other instanceof StreamChunkId o) {
       return streamId == o.streamId && chunkIndex == o.chunkIndex;
     }
     return false;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamFailure.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamFailure.java
index 06836f5eea39..9a7bf2f65af3 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamFailure.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamFailure.java
@@ -62,8 +62,7 @@ public final class StreamFailure extends AbstractMessage 
implements ResponseMess
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof StreamFailure) {
-      StreamFailure o = (StreamFailure) other;
+    if (other instanceof StreamFailure o) {
       return streamId.equals(o.streamId) && error.equals(o.error);
     }
     return false;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamRequest.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamRequest.java
index 3d035e5c94f2..5906b4d380d6 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamRequest.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamRequest.java
@@ -61,8 +61,7 @@ public final class StreamRequest extends AbstractMessage 
implements RequestMessa
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof StreamRequest) {
-      StreamRequest o = (StreamRequest) other;
+    if (other instanceof StreamRequest o) {
       return streamId.equals(o.streamId);
     }
     return false;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java
index f30605ce836f..0c0aa5c9a635 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/StreamResponse.java
@@ -75,8 +75,7 @@ public final class StreamResponse extends 
AbstractResponseMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof StreamResponse) {
-      StreamResponse o = (StreamResponse) other;
+    if (other instanceof StreamResponse o) {
       return byteCount == o.byteCount && streamId.equals(o.streamId);
     }
     return false;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/UploadStream.java
 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/UploadStream.java
index fb50801a51ba..4722f39dfa9d 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/protocol/UploadStream.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/protocol/UploadStream.java
@@ -91,8 +91,7 @@ public final class UploadStream extends AbstractMessage 
implements RequestMessag
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof UploadStream) {
-      UploadStream o = (UploadStream) other;
+    if (other instanceof UploadStream o) {
       return requestId == o.requestId && super.equals(o);
     }
     return false;
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java
 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java
index 05a5afe195e8..524ff0a31065 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslClient.java
@@ -127,17 +127,14 @@ public class SparkSaslClient implements 
SaslEncryptionBackend {
     public void handle(Callback[] callbacks) throws 
UnsupportedCallbackException {
 
       for (Callback callback : callbacks) {
-        if (callback instanceof NameCallback) {
+        if (callback instanceof NameCallback nc) {
           logger.trace("SASL client callback: setting username");
-          NameCallback nc = (NameCallback) callback;
           
nc.setName(encodeIdentifier(secretKeyHolder.getSaslUser(secretKeyId)));
-        } else if (callback instanceof PasswordCallback) {
+        } else if (callback instanceof PasswordCallback pc) {
           logger.trace("SASL client callback: setting password");
-          PasswordCallback pc = (PasswordCallback) callback;
           
pc.setPassword(encodePassword(secretKeyHolder.getSecretKey(secretKeyId)));
-        } else if (callback instanceof RealmCallback) {
+        } else if (callback instanceof RealmCallback rc) {
           logger.trace("SASL client callback: setting realm");
-          RealmCallback rc = (RealmCallback) callback;
           rc.setText(rc.getDefaultText());
         } else if (callback instanceof RealmChoiceCallback) {
           // ignore (?)
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java
 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java
index e22e09d2a22e..26e5718cb4a7 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/sasl/SparkSaslServer.java
@@ -156,20 +156,16 @@ public class SparkSaslServer implements 
SaslEncryptionBackend {
     @Override
     public void handle(Callback[] callbacks) throws 
UnsupportedCallbackException {
       for (Callback callback : callbacks) {
-        if (callback instanceof NameCallback) {
+        if (callback instanceof NameCallback nc) {
           logger.trace("SASL server callback: setting username");
-          NameCallback nc = (NameCallback) callback;
           
nc.setName(encodeIdentifier(secretKeyHolder.getSaslUser(secretKeyId)));
-        } else if (callback instanceof PasswordCallback) {
+        } else if (callback instanceof PasswordCallback pc) {
           logger.trace("SASL server callback: setting password");
-          PasswordCallback pc = (PasswordCallback) callback;
           
pc.setPassword(encodePassword(secretKeyHolder.getSecretKey(secretKeyId)));
-        } else if (callback instanceof RealmCallback) {
+        } else if (callback instanceof RealmCallback rc) {
           logger.trace("SASL server callback: setting realm");
-          RealmCallback rc = (RealmCallback) callback;
           rc.setText(rc.getDefaultText());
-        } else if (callback instanceof AuthorizeCallback) {
-          AuthorizeCallback ac = (AuthorizeCallback) callback;
+        } else if (callback instanceof AuthorizeCallback ac) {
           String authId = ac.getAuthenticationID();
           String authzId = ac.getAuthorizationID();
           ac.setAuthorized(authId.equals(authzId));
diff --git 
a/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
 
b/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
index f55ca2204cdb..e12f9120fdbb 100644
--- 
a/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
+++ 
b/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
@@ -148,8 +148,7 @@ public class TransportChannelHandler extends 
SimpleChannelInboundHandler<Message
   /** Triggered based on events from an {@link 
io.netty.handler.timeout.IdleStateHandler}. */
   @Override
   public void userEventTriggered(ChannelHandlerContext ctx, Object evt) throws 
Exception {
-    if (evt instanceof IdleStateEvent) {
-      IdleStateEvent e = (IdleStateEvent) evt;
+    if (evt instanceof IdleStateEvent e) {
       // See class comment for timeout semantics. In addition to ensuring we 
only timeout while
       // there are outstanding requests, we also do a secondary consistency 
check to ensure
       // there's no race between the idle timeout and incrementing the 
numOutstandingRequests
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java
index 3d7c1b1ca0cc..efcc83f409ea 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java
@@ -128,8 +128,7 @@ public class ExternalBlockHandler extends RpcHandler
       ByteBuffer messageHeader,
       RpcResponseCallback callback) {
     BlockTransferMessage msgObj = 
BlockTransferMessage.Decoder.fromByteBuffer(messageHeader);
-    if (msgObj instanceof PushBlockStream) {
-      PushBlockStream message = (PushBlockStream) msgObj;
+    if (msgObj instanceof PushBlockStream message) {
       checkAuth(client, message.appId);
       return mergeManager.receiveBlockDataAsStream(message);
     } else {
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ShuffleTransportContext.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ShuffleTransportContext.java
index 39ddf2c2a7ed..e0971d49510a 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ShuffleTransportContext.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ShuffleTransportContext.java
@@ -131,8 +131,7 @@ public class ShuffleTransportContext extends 
TransportContext {
         List<Object> list) throws Exception {
       delegate.decode(channelHandlerContext, byteBuf, list);
       Object msg = list.get(list.size() - 1);
-      if (msg instanceof RpcRequest) {
-        RpcRequest req = (RpcRequest) msg;
+      if (msg instanceof RpcRequest req) {
         ByteBuffer buffer = req.body().nioByteBuffer();
         byte type = Unpooled.wrappedBuffer(buffer).readByte();
         if (type == BlockTransferMessage.Type.FINALIZE_SHUFFLE_MERGE.id()) {
@@ -171,8 +170,7 @@ public class ShuffleTransportContext extends 
TransportContext {
 
     @Override
     public boolean acceptInboundMessage(Object msg) throws Exception {
-      if (msg instanceof RpcRequestInternal) {
-        RpcRequestInternal rpcRequestInternal = (RpcRequestInternal) msg;
+      if (msg instanceof RpcRequestInternal rpcRequestInternal) {
         return rpcRequestInternal.messageType == 
BlockTransferMessage.Type.FINALIZE_SHUFFLE_MERGE;
       }
       return false;
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlockPushReturnCode.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlockPushReturnCode.java
index d3f170f91507..05347c671e00 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlockPushReturnCode.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlockPushReturnCode.java
@@ -68,8 +68,7 @@ public class BlockPushReturnCode extends BlockTransferMessage 
{
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof BlockPushReturnCode) {
-      BlockPushReturnCode o = (BlockPushReturnCode) other;
+    if (other instanceof BlockPushReturnCode o) {
       return returnCode == o.returnCode && Objects.equals(failureBlockId, 
o.failureBlockId);
     }
     return false;
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlocksRemoved.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlocksRemoved.java
index 452f70c6cd22..2a050ce40b84 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlocksRemoved.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/BlocksRemoved.java
@@ -51,8 +51,7 @@ public class BlocksRemoved extends BlockTransferMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof BlocksRemoved) {
-      BlocksRemoved o = (BlocksRemoved) other;
+    if (other instanceof BlocksRemoved o) {
       return numRemovedBlocks == o.numRemovedBlocks;
     }
     return false;
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/ExecutorShuffleInfo.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/ExecutorShuffleInfo.java
index ead13f5b14f1..8a3ccdef2920 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/ExecutorShuffleInfo.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/ExecutorShuffleInfo.java
@@ -69,8 +69,7 @@ public class ExecutorShuffleInfo implements Encodable {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof ExecutorShuffleInfo) {
-      ExecutorShuffleInfo o = (ExecutorShuffleInfo) other;
+    if (other instanceof ExecutorShuffleInfo o) {
       return Arrays.equals(localDirs, o.localDirs)
         && subDirsPerLocalDir == o.subDirsPerLocalDir
         && Objects.equals(shuffleManager, o.shuffleManager);
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/FinalizeShuffleMerge.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/FinalizeShuffleMerge.java
index e99fe1707092..cd5e005348f4 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/FinalizeShuffleMerge.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/FinalizeShuffleMerge.java
@@ -69,8 +69,7 @@ public class FinalizeShuffleMerge extends 
BlockTransferMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof FinalizeShuffleMerge) {
-      FinalizeShuffleMerge o = (FinalizeShuffleMerge) other;
+    if (other instanceof FinalizeShuffleMerge o) {
       return Objects.equal(appId, o.appId)
         && appAttemptId == o.appAttemptId
         && shuffleId == o.shuffleId
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/GetLocalDirsForExecutors.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/GetLocalDirsForExecutors.java
index 47f617c5e0a0..f118f0604d9e 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/GetLocalDirsForExecutors.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/GetLocalDirsForExecutors.java
@@ -57,8 +57,7 @@ public class GetLocalDirsForExecutors extends 
BlockTransferMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof GetLocalDirsForExecutors) {
-      GetLocalDirsForExecutors o = (GetLocalDirsForExecutors) other;
+    if (other instanceof GetLocalDirsForExecutors o) {
       return appId.equals(o.appId) && Arrays.equals(execIds, o.execIds);
     }
     return false;
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/LocalDirsForExecutors.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/LocalDirsForExecutors.java
index 9e2f0668cbd2..b65f351d3cf3 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/LocalDirsForExecutors.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/LocalDirsForExecutors.java
@@ -73,8 +73,7 @@ public class LocalDirsForExecutors extends 
BlockTransferMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof LocalDirsForExecutors) {
-      LocalDirsForExecutors o = (LocalDirsForExecutors) other;
+    if (other instanceof LocalDirsForExecutors o) {
       return Arrays.equals(execIds, o.execIds)
         && Arrays.equals(numLocalDirsByExec, o.numLocalDirsByExec)
         && Arrays.equals(allLocalDirs, o.allLocalDirs);
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/MergeStatuses.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/MergeStatuses.java
index b6bfc302d218..892c3a5e7795 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/MergeStatuses.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/MergeStatuses.java
@@ -95,8 +95,7 @@ public class MergeStatuses extends BlockTransferMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof MergeStatuses) {
-      MergeStatuses o = (MergeStatuses) other;
+    if (other instanceof MergeStatuses o) {
       return Objects.equal(shuffleId, o.shuffleId)
         && Objects.equal(shuffleMergeId, o.shuffleMergeId)
         && Arrays.equals(bitmaps, o.bitmaps)
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/OpenBlocks.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/OpenBlocks.java
index 91f203764ecd..49288eef5c5d 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/OpenBlocks.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/OpenBlocks.java
@@ -60,8 +60,7 @@ public class OpenBlocks extends BlockTransferMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof OpenBlocks) {
-      OpenBlocks o = (OpenBlocks) other;
+    if (other instanceof OpenBlocks o) {
       return Objects.equals(appId, o.appId)
         && Objects.equals(execId, o.execId)
         && Arrays.equals(blockIds, o.blockIds);
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/PushBlockStream.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/PushBlockStream.java
index fc9900bae1e8..ceab54a1c061 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/PushBlockStream.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/PushBlockStream.java
@@ -87,8 +87,7 @@ public class PushBlockStream extends BlockTransferMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof PushBlockStream) {
-      PushBlockStream o = (PushBlockStream) other;
+    if (other instanceof PushBlockStream o) {
       return Objects.equal(appId, o.appId)
         && appAttemptId == o.appAttemptId
         && shuffleId == o.shuffleId
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/RegisterExecutor.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/RegisterExecutor.java
index 618982072620..9805af67b9f2 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/RegisterExecutor.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/RegisterExecutor.java
@@ -65,8 +65,7 @@ public class RegisterExecutor extends BlockTransferMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof RegisterExecutor) {
-      RegisterExecutor o = (RegisterExecutor) other;
+    if (other instanceof RegisterExecutor o) {
       return Objects.equals(appId, o.appId)
         && Objects.equals(execId, o.execId)
         && Objects.equals(executorInfo, o.executorInfo);
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/RemoveBlocks.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/RemoveBlocks.java
index 6c194d1a14cf..7032942331c3 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/RemoveBlocks.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/RemoveBlocks.java
@@ -60,8 +60,7 @@ public class RemoveBlocks extends BlockTransferMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof RemoveBlocks) {
-      RemoveBlocks o = (RemoveBlocks) other;
+    if (other instanceof RemoveBlocks o) {
       return Objects.equals(appId, o.appId)
         && Objects.equals(execId, o.execId)
         && Arrays.equals(blockIds, o.blockIds);
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/RemoveShuffleMerge.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/RemoveShuffleMerge.java
index 3bcb57a70bcb..8ce2e05e6097 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/RemoveShuffleMerge.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/RemoveShuffleMerge.java
@@ -69,8 +69,7 @@ public class RemoveShuffleMerge extends BlockTransferMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other != null && other instanceof RemoveShuffleMerge) {
-      RemoveShuffleMerge o = (RemoveShuffleMerge) other;
+    if (other instanceof RemoveShuffleMerge o) {
       return Objects.equal(appId, o.appId)
         && appAttemptId == o.appAttemptId
         && shuffleId == o.shuffleId
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/StreamHandle.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/StreamHandle.java
index 20954914a7ce..aebd6f0d5a62 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/StreamHandle.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/StreamHandle.java
@@ -57,8 +57,7 @@ public class StreamHandle extends BlockTransferMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof StreamHandle) {
-      StreamHandle o = (StreamHandle) other;
+    if (other instanceof StreamHandle o) {
       return Objects.equals(streamId, o.streamId)
         && Objects.equals(numChunks, o.numChunks);
     }
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/UploadBlock.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/UploadBlock.java
index c5e07d0d991b..fad187971e09 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/UploadBlock.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/UploadBlock.java
@@ -79,8 +79,7 @@ public class UploadBlock extends BlockTransferMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof UploadBlock) {
-      UploadBlock o = (UploadBlock) other;
+    if (other instanceof UploadBlock o) {
       return Objects.equals(appId, o.appId)
         && Objects.equals(execId, o.execId)
         && Objects.equals(blockId, o.blockId)
diff --git 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/UploadBlockStream.java
 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/UploadBlockStream.java
index a1ac9da0956d..95d0b3835562 100644
--- 
a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/UploadBlockStream.java
+++ 
b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/protocol/UploadBlockStream.java
@@ -63,8 +63,7 @@ public class UploadBlockStream extends BlockTransferMessage {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof UploadBlockStream) {
-      UploadBlockStream o = (UploadBlockStream) other;
+    if (other instanceof UploadBlockStream o) {
       return Objects.equals(blockId, o.blockId)
         && Arrays.equals(metadata, o.metadata);
     }
diff --git 
a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java
 
b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java
index d843a6719e6b..9230c2b40ed1 100644
--- 
a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java
+++ 
b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java
@@ -63,10 +63,9 @@ class YarnShuffleServiceMetrics implements MetricsSource {
   public static void collectMetric(
     MetricsRecordBuilder metricsRecordBuilder, String name, Metric metric) {
 
-    if (metric instanceof Timer) {
+    if (metric instanceof Timer t) {
       // Timer records both the operations count and delay
       // Snapshot inside the Timer provides the information for the operation 
delay
-      Timer t = (Timer) metric;
       Snapshot snapshot = t.getSnapshot();
       metricsRecordBuilder
         .addCounter(new ShuffleServiceMetricsInfo(name + "_count", "Count of 
timer " + name),
@@ -107,8 +106,7 @@ class YarnShuffleServiceMetrics implements MetricsSource {
           getShuffleServiceMetricsInfoForGenericValue(name, percentileStr),
           snapshot.getValue(percentileThousands / 1000.0));
       }
-    } else if (metric instanceof Meter) {
-      Meter m = (Meter) metric;
+    } else if (metric instanceof Meter m) {
       metricsRecordBuilder
         .addCounter(new ShuffleServiceMetricsInfo(name + "_count", "Count of 
meter " + name),
           m.getCount())
@@ -141,8 +139,7 @@ class YarnShuffleServiceMetrics implements MetricsSource {
         throw new IllegalStateException(
                 "Not supported class type of metric[" + name + "] for value " 
+ gaugeValue);
       }
-    } else if (metric instanceof Counter) {
-      Counter c = (Counter) metric;
+    } else if (metric instanceof Counter c) {
       long counterValue = c.getCount();
       
metricsRecordBuilder.addGauge(getShuffleServiceMetricsInfoForCounter(name), 
counterValue);
     }
diff --git 
a/common/sketch/src/main/java/org/apache/spark/util/sketch/BitArray.java 
b/common/sketch/src/main/java/org/apache/spark/util/sketch/BitArray.java
index 31857443e8c6..51d661d42736 100644
--- a/common/sketch/src/main/java/org/apache/spark/util/sketch/BitArray.java
+++ b/common/sketch/src/main/java/org/apache/spark/util/sketch/BitArray.java
@@ -115,8 +115,7 @@ final class BitArray {
   @Override
   public boolean equals(Object other) {
     if (this == other) return true;
-    if (!(other instanceof BitArray)) return false;
-    BitArray that = (BitArray) other;
+    if (!(other instanceof BitArray that)) return false;
     return Arrays.equals(data, that.data);
   }
 
diff --git 
a/common/sketch/src/main/java/org/apache/spark/util/sketch/BloomFilterImpl.java 
b/common/sketch/src/main/java/org/apache/spark/util/sketch/BloomFilterImpl.java
index 3fba5e332522..18ac5a2e4297 100644
--- 
a/common/sketch/src/main/java/org/apache/spark/util/sketch/BloomFilterImpl.java
+++ 
b/common/sketch/src/main/java/org/apache/spark/util/sketch/BloomFilterImpl.java
@@ -42,12 +42,10 @@ class BloomFilterImpl extends BloomFilter implements 
Serializable {
       return true;
     }
 
-    if (!(other instanceof BloomFilterImpl)) {
+    if (!(other instanceof BloomFilterImpl that)) {
       return false;
     }
 
-    BloomFilterImpl that = (BloomFilterImpl) other;
-
     return this.numHashFunctions == that.numHashFunctions && 
this.bits.equals(that.bits);
   }
 
@@ -183,11 +181,10 @@ class BloomFilterImpl extends BloomFilter implements 
Serializable {
       return false;
     }
 
-    if (!(other instanceof BloomFilterImpl)) {
+    if (!(other instanceof BloomFilterImpl that)) {
       return false;
     }
 
-    BloomFilterImpl that = (BloomFilterImpl) other;
     return this.bitSize() == that.bitSize() && this.numHashFunctions == 
that.numHashFunctions;
   }
 
@@ -219,14 +216,12 @@ class BloomFilterImpl extends BloomFilter implements 
Serializable {
       throw new IncompatibleMergeException("Cannot merge null bloom filter");
     }
 
-    if (!(other instanceof BloomFilterImpl)) {
+    if (!(other instanceof BloomFilterImpl that)) {
       throw new IncompatibleMergeException(
         "Cannot merge bloom filter of class " + other.getClass().getName()
       );
     }
 
-    BloomFilterImpl that = (BloomFilterImpl) other;
-
     if (this.bitSize() != that.bitSize()) {
       throw new IncompatibleMergeException("Cannot merge bloom filters with 
different bit size");
     }
diff --git 
a/common/sketch/src/main/java/org/apache/spark/util/sketch/CountMinSketchImpl.java
 
b/common/sketch/src/main/java/org/apache/spark/util/sketch/CountMinSketchImpl.java
index 80e71738198b..220e85195536 100644
--- 
a/common/sketch/src/main/java/org/apache/spark/util/sketch/CountMinSketchImpl.java
+++ 
b/common/sketch/src/main/java/org/apache/spark/util/sketch/CountMinSketchImpl.java
@@ -70,12 +70,10 @@ class CountMinSketchImpl extends CountMinSketch implements 
Serializable {
       return true;
     }
 
-    if (!(other instanceof CountMinSketchImpl)) {
+    if (!(other instanceof CountMinSketchImpl that)) {
       return false;
     }
 
-    CountMinSketchImpl that = (CountMinSketchImpl) other;
-
     return
       this.depth == that.depth &&
       this.width == that.width &&
@@ -268,14 +266,12 @@ class CountMinSketchImpl extends CountMinSketch 
implements Serializable {
       throw new IncompatibleMergeException("Cannot merge null estimator");
     }
 
-    if (!(other instanceof CountMinSketchImpl)) {
+    if (!(other instanceof CountMinSketchImpl that)) {
       throw new IncompatibleMergeException(
           "Cannot merge estimator of class " + other.getClass().getName()
       );
     }
 
-    CountMinSketchImpl that = (CountMinSketchImpl) other;
-
     if (this.depth != that.depth) {
       throw new IncompatibleMergeException("Cannot merge estimators of 
different depth");
     }
diff --git 
a/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java 
b/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
index 1d96f8126e23..e362a13eb5fb 100644
--- a/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
+++ b/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
@@ -185,8 +185,7 @@ public final class UTF8String implements 
Comparable<UTF8String>, Externalizable,
    */
   @Nonnull
   public ByteBuffer getByteBuffer() {
-    if (base instanceof byte[] && offset >= BYTE_ARRAY_OFFSET) {
-      final byte[] bytes = (byte[]) base;
+    if (base instanceof byte[] bytes && offset >= BYTE_ARRAY_OFFSET) {
 
       // the offset includes an object header... this is only needed for 
unsafe copies
       final long arrayOffset = offset - BYTE_ARRAY_OFFSET;
@@ -1401,8 +1400,7 @@ public final class UTF8String implements 
Comparable<UTF8String>, Externalizable,
 
   @Override
   public boolean equals(final Object other) {
-    if (other instanceof UTF8String) {
-      UTF8String o = (UTF8String) other;
+    if (other instanceof UTF8String o) {
       if (numBytes != o.numBytes) {
         return false;
       }
diff --git a/core/src/main/java/org/apache/spark/api/java/Optional.java 
b/core/src/main/java/org/apache/spark/api/java/Optional.java
index fd0f495ca29d..362149c92145 100644
--- a/core/src/main/java/org/apache/spark/api/java/Optional.java
+++ b/core/src/main/java/org/apache/spark/api/java/Optional.java
@@ -168,10 +168,9 @@ public final class Optional<T> implements Serializable {
 
   @Override
   public boolean equals(Object obj) {
-    if (!(obj instanceof Optional)) {
+    if (!(obj instanceof Optional<?> other)) {
       return false;
     }
-    Optional<?> other = (Optional<?>) obj;
     return Objects.equals(value, other.value);
   }
 
diff --git 
a/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java 
b/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java
index 125205f416d3..be7c0864c2f4 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java
@@ -317,10 +317,9 @@ class LauncherServer implements Closeable {
     @Override
     protected void handle(Message msg) throws IOException {
       try {
-        if (msg instanceof Hello) {
+        if (msg instanceof Hello hello) {
           timeout.cancel();
           timeout = null;
-          Hello hello = (Hello) msg;
           AbstractAppHandle handle = secretToPendingApps.remove(hello.secret);
           if (handle != null) {
             handle.setConnection(this);
@@ -334,8 +333,7 @@ class LauncherServer implements Closeable {
           if (handle == null) {
             throw new IllegalArgumentException("Expected hello, got: " + 
msgClassName);
           }
-          if (msg instanceof SetAppId) {
-            SetAppId set = (SetAppId) msg;
+          if (msg instanceof SetAppId set) {
             handle.setAppId(set.appId);
           } else if (msg instanceof SetState) {
             handle.setState(((SetState)msg).state);
diff --git 
a/sql/api/src/main/java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java
 
b/sql/api/src/main/java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java
index 61894b18fe69..17895e73d9fc 100644
--- 
a/sql/api/src/main/java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java
+++ 
b/sql/api/src/main/java/org/apache/spark/sql/connector/catalog/IdentifierImpl.java
@@ -65,8 +65,7 @@ class IdentifierImpl implements Identifier {
   @Override
   public boolean equals(Object o) {
     if (this == o) return true;
-    if (!(o instanceof IdentifierImpl)) return false;
-    IdentifierImpl that = (IdentifierImpl) o;
+    if (!(o instanceof IdentifierImpl that)) return false;
     return Arrays.equals(namespace, that.namespace) && name.equals(that.name);
   }
 
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/SpecializedGettersReader.java
 
b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/SpecializedGettersReader.java
index be50350b106a..91f04c3d327a 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/SpecializedGettersReader.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/SpecializedGettersReader.java
@@ -57,8 +57,7 @@ public final class SpecializedGettersReader {
     if (physicalDataType instanceof PhysicalStringType) {
       return obj.getUTF8String(ordinal);
     }
-    if (physicalDataType instanceof PhysicalDecimalType) {
-      PhysicalDecimalType dt = (PhysicalDecimalType) physicalDataType;
+    if (physicalDataType instanceof PhysicalDecimalType dt) {
       return obj.getDecimal(ordinal, dt.precision(), dt.scale());
     }
     if (physicalDataType instanceof PhysicalCalendarIntervalType) {
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java
 
b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java
index 6bea714e7d58..ea6f1e05422b 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeArrayData.java
@@ -329,8 +329,7 @@ public final class UnsafeArrayData extends ArrayData 
implements Externalizable,
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof UnsafeArrayData) {
-      UnsafeArrayData o = (UnsafeArrayData) other;
+    if (other instanceof UnsafeArrayData o) {
       return (sizeInBytes == o.sizeInBytes) &&
         ByteArrayMethods.arrayEquals(baseObject, baseOffset, o.baseObject, 
o.baseOffset,
           sizeInBytes);
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
 
b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
index d2433292fc7b..8f9d5919e1d9 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
@@ -538,8 +538,7 @@ public final class UnsafeRow extends InternalRow implements 
Externalizable, Kryo
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof UnsafeRow) {
-      UnsafeRow o = (UnsafeRow) other;
+    if (other instanceof UnsafeRow o) {
       return (sizeInBytes == o.sizeInBytes) &&
         ByteArrayMethods.arrayEquals(baseObject, baseOffset, o.baseObject, 
o.baseOffset,
           sizeInBytes);
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/ColumnDefaultValue.java
 
b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/ColumnDefaultValue.java
index b8e75c11c813..cc3ff63fb29b 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/ColumnDefaultValue.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/ColumnDefaultValue.java
@@ -67,8 +67,7 @@ public class ColumnDefaultValue {
   @Override
   public boolean equals(Object o) {
     if (this == o) return true;
-    if (!(o instanceof ColumnDefaultValue)) return false;
-    ColumnDefaultValue that = (ColumnDefaultValue) o;
+    if (!(o instanceof ColumnDefaultValue that)) return false;
     return sql.equals(that.sql) && value.equals(that.value);
   }
 
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/util/V2ExpressionSQLBuilder.java
 
b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/util/V2ExpressionSQLBuilder.java
index 9ca0fe4787f1..e529a8e9250f 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/util/V2ExpressionSQLBuilder.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/util/V2ExpressionSQLBuilder.java
@@ -53,18 +53,14 @@ public class V2ExpressionSQLBuilder {
       return visitLiteral((Literal<?>) expr);
     } else if (expr instanceof NamedReference) {
       return visitNamedReference((NamedReference) expr);
-    } else if (expr instanceof Cast) {
-      Cast cast = (Cast) expr;
+    } else if (expr instanceof Cast cast) {
       return visitCast(build(cast.expression()), cast.dataType());
-    } else if (expr instanceof Extract) {
-      Extract extract = (Extract) expr;
+    } else if (expr instanceof Extract extract) {
       return visitExtract(extract.field(), build(extract.source()));
-    } else if (expr instanceof SortOrder) {
-      SortOrder sortOrder = (SortOrder) expr;
+    } else if (expr instanceof SortOrder sortOrder) {
       return visitSortOrder(
         build(sortOrder.expression()), sortOrder.direction(), 
sortOrder.nullOrdering());
-    } else if (expr instanceof GeneralScalarExpression) {
-      GeneralScalarExpression e = (GeneralScalarExpression) expr;
+    } else if (expr instanceof GeneralScalarExpression e) {
       String name = e.name();
       switch (name) {
         case "IN": {
@@ -181,26 +177,21 @@ public class V2ExpressionSQLBuilder {
         default:
           return visitUnexpectedExpr(expr);
       }
-    } else if (expr instanceof Min) {
-      Min min = (Min) expr;
+    } else if (expr instanceof Min min) {
       return visitAggregateFunction("MIN", false,
         expressionsToStringArray(min.children()));
-    } else if (expr instanceof Max) {
-      Max max = (Max) expr;
+    } else if (expr instanceof Max max) {
       return visitAggregateFunction("MAX", false,
         expressionsToStringArray(max.children()));
-    } else if (expr instanceof Count) {
-      Count count = (Count) expr;
+    } else if (expr instanceof Count count) {
       return visitAggregateFunction("COUNT", count.isDistinct(),
         expressionsToStringArray(count.children()));
-    } else if (expr instanceof Sum) {
-      Sum sum = (Sum) expr;
+    } else if (expr instanceof Sum sum) {
       return visitAggregateFunction("SUM", sum.isDistinct(),
         expressionsToStringArray(sum.children()));
     } else if (expr instanceof CountStar) {
       return visitAggregateFunction("COUNT", false, new String[]{"*"});
-    } else if (expr instanceof Avg) {
-      Avg avg = (Avg) expr;
+    } else if (expr instanceof Avg avg) {
       return visitAggregateFunction("AVG", avg.isDistinct(),
         expressionsToStringArray(avg.children()));
     } else if (expr instanceof GeneralAggregateFunc) {
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java
 
b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java
index 34870597ee32..31ecf5cbe17f 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java
@@ -181,14 +181,11 @@ public class ArrowColumnVector extends ColumnVector {
       accessor = new TimestampAccessor((TimeStampMicroTZVector) vector);
     } else if (vector instanceof TimeStampMicroVector) {
       accessor = new TimestampNTZAccessor((TimeStampMicroVector) vector);
-    } else if (vector instanceof MapVector) {
-      MapVector mapVector = (MapVector) vector;
+    } else if (vector instanceof MapVector mapVector) {
       accessor = new MapAccessor(mapVector);
-    } else if (vector instanceof ListVector) {
-      ListVector listVector = (ListVector) vector;
+    } else if (vector instanceof ListVector listVector) {
       accessor = new ArrayAccessor(listVector);
-    } else if (vector instanceof StructVector) {
-      StructVector structVector = (StructVector) vector;
+    } else if (vector instanceof StructVector structVector) {
       accessor = new StructAccessor(structVector);
 
       childColumns = new ArrowColumnVector[structVector.size()];
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
 
b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
index da794e4bb918..c0d2ae8e7d0e 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
@@ -68,8 +68,7 @@ public final class ColumnarBatchRow extends InternalRow {
           row.update(i, getUTF8String(i).copy());
         } else if (pdt instanceof PhysicalBinaryType) {
           row.update(i, getBinary(i));
-        } else if (pdt instanceof PhysicalDecimalType) {
-          PhysicalDecimalType t = (PhysicalDecimalType)pdt;
+        } else if (pdt instanceof PhysicalDecimalType t) {
           row.setDecimal(i, getDecimal(i, t.precision(), t.scale()), 
t.precision());
         } else if (pdt instanceof PhysicalStructType) {
           row.update(i, getStruct(i, ((PhysicalStructType) 
pdt).fields().length).copy());
@@ -169,8 +168,7 @@ public final class ColumnarBatchRow extends InternalRow {
       return getUTF8String(ordinal);
     } else if (dataType instanceof BinaryType) {
       return getBinary(ordinal);
-    } else if (dataType instanceof DecimalType) {
-      DecimalType t = (DecimalType) dataType;
+    } else if (dataType instanceof DecimalType t) {
       return getDecimal(ordinal, t.precision(), t.scale());
     } else if (dataType instanceof DateType) {
       return getInt(ordinal);
diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java 
b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java
index c4fbc2ff6422..1df4653f5527 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java
@@ -75,8 +75,7 @@ public final class ColumnarRow extends InternalRow {
           row.update(i, getUTF8String(i).copy());
         } else if (pdt instanceof PhysicalBinaryType) {
           row.update(i, getBinary(i));
-        } else if (pdt instanceof PhysicalDecimalType) {
-          PhysicalDecimalType t = (PhysicalDecimalType)pdt;
+        } else if (pdt instanceof PhysicalDecimalType t) {
           row.setDecimal(i, getDecimal(i, t.precision(), t.scale()), 
t.precision());
         } else if (pdt instanceof PhysicalStructType) {
           row.update(i, getStruct(i, ((PhysicalStructType) 
pdt).fields().length).copy());
@@ -176,8 +175,7 @@ public final class ColumnarRow extends InternalRow {
       return getUTF8String(ordinal);
     } else if (dataType instanceof BinaryType) {
       return getBinary(ordinal);
-    } else if (dataType instanceof DecimalType) {
-      DecimalType t = (DecimalType) dataType;
+    } else if (dataType instanceof DecimalType t) {
       return getDecimal(ordinal, t.precision(), t.scale());
     } else if (dataType instanceof DateType) {
       return getInt(ordinal);
diff --git 
a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVectorUtils.java
 
b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVectorUtils.java
index 89f6996e4610..6fbd76538aa8 100644
--- 
a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVectorUtils.java
+++ 
b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcColumnVectorUtils.java
@@ -40,8 +40,7 @@ class OrcColumnVectorUtils {
       vector instanceof DecimalColumnVector ||
       vector instanceof TimestampColumnVector) {
       return new OrcAtomicColumnVector(type, vector);
-    } else if (vector instanceof StructColumnVector) {
-      StructColumnVector structVector = (StructColumnVector) vector;
+    } else if (vector instanceof StructColumnVector structVector) {
       OrcColumnVector[] fields = new 
OrcColumnVector[structVector.fields.length];
       int ordinal = 0;
       for (StructField f : ((StructType) type).fields()) {
@@ -49,13 +48,11 @@ class OrcColumnVectorUtils {
         ordinal++;
       }
       return new OrcStructColumnVector(type, vector, fields);
-    } else if (vector instanceof ListColumnVector) {
-      ListColumnVector listVector = (ListColumnVector) vector;
+    } else if (vector instanceof ListColumnVector listVector) {
       OrcColumnVector dataVector = toOrcColumnVector(
         ((ArrayType) type).elementType(), listVector.child);
       return new OrcArrayColumnVector(type, vector, dataVector);
-    } else if (vector instanceof MapColumnVector) {
-      MapColumnVector mapVector = (MapColumnVector) vector;
+    } else if (vector instanceof MapColumnVector mapVector) {
       MapType mapType = (MapType) type;
       OrcColumnVector keysVector = toOrcColumnVector(mapType.keyType(), 
mapVector.keys);
       OrcColumnVector valuesVector = toOrcColumnVector(mapType.valueType(), 
mapVector.values);
diff --git 
a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorUpdaterFactory.java
 
b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorUpdaterFactory.java
index 15d58f0c7572..d5675db4c3ad 100644
--- 
a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorUpdaterFactory.java
+++ 
b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorUpdaterFactory.java
@@ -1143,8 +1143,7 @@ public class ParquetVectorUpdaterFactory {
   }
 
   private static boolean isLongDecimal(DataType dt) {
-    if (dt instanceof DecimalType) {
-      DecimalType d = (DecimalType) dt;
+    if (dt instanceof DecimalType d) {
       return d.precision() == 20 && d.scale() == 0;
     }
     return false;
@@ -1153,8 +1152,7 @@ public class ParquetVectorUpdaterFactory {
   private static boolean isDecimalTypeMatched(ColumnDescriptor descriptor, DataType dt) {
     DecimalType d = (DecimalType) dt;
     LogicalTypeAnnotation typeAnnotation = descriptor.getPrimitiveType().getLogicalTypeAnnotation();
-    if (typeAnnotation instanceof DecimalLogicalTypeAnnotation) {
-      DecimalLogicalTypeAnnotation decimalType = (DecimalLogicalTypeAnnotation) typeAnnotation;
+    if (typeAnnotation instanceof DecimalLogicalTypeAnnotation decimalType) {
       // It's OK if the required decimal precision is larger than or equal to the physical decimal
       // precision in the Parquet metadata, as long as the decimal scale is the same.
       return decimalType.getPrecision() <= d.precision() && decimalType.getScale() == d.scale();
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedParquetRecordReader.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedParquetRecordReader.java
index 71ea3e9ce097..baefa254466f 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedParquetRecordReader.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedParquetRecordReader.java
@@ -403,9 +403,8 @@ public class VectorizedParquetRecordReader extends SpecificParquetRecordReaderBa
 
   private boolean containsPath(Type parquetType, String[] path, int depth) {
     if (path.length == depth) return true;
-    if (parquetType instanceof GroupType) {
+    if (parquetType instanceof GroupType parquetGroupType) {
       String fieldName = path[depth];
-      GroupType parquetGroupType = (GroupType) parquetType;
       if (parquetGroupType.containsField(fieldName)) {
         return containsPath(parquetGroupType.getType(fieldName), path, depth + 1);
       }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVectorUtils.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVectorUtils.java
index 6ab81cf40483..7b841ab9933e 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVectorUtils.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ColumnVectorUtils.java
@@ -75,8 +75,7 @@ public class ColumnVectorUtils {
       } else if (pdt instanceof PhysicalStringType) {
         UTF8String v = row.getUTF8String(fieldIdx);
         col.setUtf8String(v);
-      } else if (pdt instanceof PhysicalDecimalType) {
-        PhysicalDecimalType dt = (PhysicalDecimalType) pdt;
+      } else if (pdt instanceof PhysicalDecimalType dt) {
         Decimal d = row.getDecimal(fieldIdx, dt.precision(), dt.scale());
         if (dt.precision() <= Decimal.MAX_INT_DIGITS()) {
           col.setInt((int)d.toUnscaledLong());
@@ -151,8 +150,7 @@ public class ColumnVectorUtils {
       } else if (t == DataTypes.BinaryType) {
         byte[] b = (byte[]) o;
         dst.appendByteArray(b, 0, b.length);
-      } else if (t instanceof DecimalType) {
-        DecimalType dt = (DecimalType) t;
+      } else if (t instanceof DecimalType dt) {
         Decimal d = Decimal.apply((BigDecimal) o, dt.precision(), dt.scale());
         if (dt.precision() <= Decimal.MAX_INT_DIGITS()) {
           dst.appendInt((int) d.toUnscaledLong());
@@ -182,8 +180,7 @@ public class ColumnVectorUtils {
   }
 
   private static void appendValue(WritableColumnVector dst, DataType t, Row src, int fieldIdx) {
-    if (t instanceof ArrayType) {
-      ArrayType at = (ArrayType)t;
+    if (t instanceof ArrayType at) {
       if (src.isNullAt(fieldIdx)) {
         dst.appendNull();
       } else {
@@ -193,8 +190,7 @@ public class ColumnVectorUtils {
           appendValue(dst.arrayData(), at.elementType(), o);
         }
       }
-    } else if (t instanceof StructType) {
-      StructType st = (StructType)t;
+    } else if (t instanceof StructType st) {
       if (src.isNullAt(fieldIdx)) {
         dst.appendStruct(true);
       } else {
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/MutableColumnarRow.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/MutableColumnarRow.java
index 64568f18f685..eda58815f3b3 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/MutableColumnarRow.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/MutableColumnarRow.java
@@ -73,8 +73,7 @@ public final class MutableColumnarRow extends InternalRow {
           row.update(i, getUTF8String(i).copy());
         } else if (dt instanceof BinaryType) {
           row.update(i, getBinary(i));
-        } else if (dt instanceof DecimalType) {
-          DecimalType t = (DecimalType)dt;
+        } else if (dt instanceof DecimalType t) {
           row.setDecimal(i, getDecimal(i, t.precision(), t.scale()), t.precision());
         } else if (dt instanceof DateType) {
           row.setInt(i, getInt(i));
@@ -178,8 +177,7 @@ public final class MutableColumnarRow extends InternalRow {
       return getUTF8String(ordinal);
     } else if (dataType instanceof BinaryType) {
       return getBinary(ordinal);
-    } else if (dataType instanceof DecimalType) {
-      DecimalType t = (DecimalType) dataType;
+    } else if (dataType instanceof DecimalType t) {
       return getDecimal(ordinal, t.precision(), t.scale());
     } else if (dataType instanceof DateType) {
       return getInt(ordinal);
@@ -214,8 +212,7 @@ public final class MutableColumnarRow extends InternalRow {
         setFloat(ordinal, (float) value);
       } else if (dt instanceof DoubleType) {
         setDouble(ordinal, (double) value);
-      } else if (dt instanceof DecimalType) {
-        DecimalType t = (DecimalType) dt;
+      } else if (dt instanceof DecimalType t) {
         Decimal d = Decimal.apply((BigDecimal) value, t.precision(), t.scale());
         setDecimal(ordinal, d, t.precision());
       } else if (dt instanceof CalendarIntervalType) {
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/WritableColumnVector.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/WritableColumnVector.java
index ac8da471f003..4c8ceff35659 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/WritableColumnVector.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/WritableColumnVector.java
@@ -721,8 +721,7 @@ public abstract class WritableColumnVector extends ColumnVector {
     if (value instanceof Byte) {
       return Optional.of(appendBytes(length, (Byte) value));
     }
-    if (value instanceof Decimal) {
-      Decimal decimal = (Decimal) value;
+    if (value instanceof Decimal decimal) {
       long unscaled = decimal.toUnscaledLong();
       if (decimal.precision() < 10) {
         return Optional.of(appendInts(length, (int) unscaled));
@@ -745,8 +744,7 @@ public abstract class WritableColumnVector extends ColumnVector {
     if (value instanceof Short) {
       return Optional.of(appendShorts(length, (Short) value));
     }
-    if (value instanceof UTF8String) {
-      UTF8String utf8 = (UTF8String) value;
+    if (value instanceof UTF8String utf8) {
       byte[] bytes = utf8.getBytes();
       int result = 0;
       for (int i = 0; i < length; ++i) {
@@ -754,8 +752,7 @@ public abstract class WritableColumnVector extends ColumnVector {
       }
       return Optional.of(result);
     }
-    if (value instanceof GenericArrayData) {
-      GenericArrayData arrayData = (GenericArrayData) value;
+    if (value instanceof GenericArrayData arrayData) {
       int result = 0;
       for (int i = 0; i < length; ++i) {
         appendArray(arrayData.numElements());
@@ -768,8 +765,7 @@ public abstract class WritableColumnVector extends ColumnVector {
       }
       return Optional.of(result);
     }
-    if (value instanceof GenericInternalRow) {
-      GenericInternalRow row = (GenericInternalRow) value;
+    if (value instanceof GenericInternalRow row) {
       int result = 0;
       for (int i = 0; i < length; ++i) {
         appendStruct(false);
@@ -783,8 +779,7 @@ public abstract class WritableColumnVector extends ColumnVector {
       }
       return Optional.of(result);
     }
-    if (value instanceof ArrayBasedMapData) {
-      ArrayBasedMapData data = (ArrayBasedMapData) value;
+    if (value instanceof ArrayBasedMapData data) {
       appendArray(length);
       int result = 0;
       for (int i = 0; i < length; ++i) {
@@ -965,14 +960,12 @@ public abstract class WritableColumnVector extends ColumnVector {
       }
       this.childColumns = new WritableColumnVector[1];
       this.childColumns[0] = reserveNewColumn(childCapacity, childType);
-    } else if (type instanceof StructType) {
-      StructType st = (StructType)type;
+    } else if (type instanceof StructType st) {
       this.childColumns = new WritableColumnVector[st.fields().length];
       for (int i = 0; i < childColumns.length; ++i) {
         this.childColumns[i] = reserveNewColumn(capacity, st.fields()[i].dataType());
       }
-    } else if (type instanceof MapType) {
-      MapType mapType = (MapType) type;
+    } else if (type instanceof MapType mapType) {
       this.childColumns = new WritableColumnVector[2];
       this.childColumns[0] = reserveNewColumn(capacity, mapType.keyType());
       this.childColumns[1] = reserveNewColumn(capacity, mapType.valueType());
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaBeanDeserializationSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaBeanDeserializationSuite.java
index f5884efd8b23..a83041dc522c 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaBeanDeserializationSuite.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaBeanDeserializationSuite.java
@@ -282,8 +282,7 @@ public class JavaBeanDeserializationSuite implements Serializable {
 
     @Override
     public boolean equals(Object obj) {
-      if (!(obj instanceof ArrayRecord)) return false;
-      ArrayRecord other = (ArrayRecord) obj;
+      if (!(obj instanceof ArrayRecord other)) return false;
       return (other.id == this.id) && Objects.equals(other.intervals, this.intervals) &&
               Arrays.equals(other.ints, ints);
     }
@@ -330,8 +329,7 @@ public class JavaBeanDeserializationSuite implements Serializable {
 
     @Override
     public boolean equals(Object obj) {
-      if (!(obj instanceof MapRecord)) return false;
-      MapRecord other = (MapRecord) obj;
+      if (!(obj instanceof MapRecord other)) return false;
       return (other.id == this.id) && Objects.equals(other.intervals, this.intervals);
     }
 
@@ -376,8 +374,7 @@ public class JavaBeanDeserializationSuite implements Serializable {
 
     @Override
     public boolean equals(Object obj) {
-      if (!(obj instanceof Interval)) return false;
-      Interval other = (Interval) obj;
+      if (!(obj instanceof Interval other)) return false;
       return (other.startTime == this.startTime) && (other.endTime == this.endTime);
     }
 
@@ -635,10 +632,9 @@ public class JavaBeanDeserializationSuite implements Serializable {
     }
 
     public boolean equals(Object o) {
-      if (!(o instanceof Item)) {
+      if (!(o instanceof Item other)) {
         return false;
       }
-      Item other = (Item) o;
       if (other.getK().equals(k) && other.getV() == v) {
         return true;
       }
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
index d8068d57ee5e..254c6df28209 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/JavaDatasetSuite.java
@@ -1752,8 +1752,7 @@ public class JavaDatasetSuite implements Serializable {
     }
 
     public boolean equals(Object other) {
-      if (other instanceof BeanWithEnum) {
-        BeanWithEnum beanWithEnum = (BeanWithEnum) other;
+      if (other instanceof BeanWithEnum beanWithEnum) {
         return beanWithEnum.regularField.equals(regularField)
           && beanWithEnum.enumField.equals(enumField);
       }
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/connector/JavaAdvancedDataSourceV2.java b/sql/core/src/test/java/test/org/apache/spark/sql/connector/JavaAdvancedDataSourceV2.java
index 1a55d198361e..0c12fd5484a6 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/connector/JavaAdvancedDataSourceV2.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/connector/JavaAdvancedDataSourceV2.java
@@ -61,8 +61,7 @@ public class JavaAdvancedDataSourceV2 implements TestingV2Source {
     @Override
     public Filter[] pushFilters(Filter[] filters) {
       Filter[] supported = Arrays.stream(filters).filter(f -> {
-        if (f instanceof GreaterThan) {
-          GreaterThan gt = (GreaterThan) f;
+        if (f instanceof GreaterThan gt) {
           return gt.attribute().equals("i") && gt.value() instanceof Integer;
         } else {
           return false;
@@ -70,8 +69,7 @@ public class JavaAdvancedDataSourceV2 implements TestingV2Source {
       }).toArray(Filter[]::new);
 
       Filter[] unsupported = Arrays.stream(filters).filter(f -> {
-        if (f instanceof GreaterThan) {
-          GreaterThan gt = (GreaterThan) f;
+        if (f instanceof GreaterThan gt) {
           return !gt.attribute().equals("i") || !(gt.value() instanceof Integer);
         } else {
           return true;
@@ -114,8 +112,7 @@ public class JavaAdvancedDataSourceV2 implements TestingV2Source {
 
       Integer lowerBound = null;
       for (Filter filter : filters) {
-        if (filter instanceof GreaterThan) {
-          GreaterThan f = (GreaterThan) filter;
+        if (filter instanceof GreaterThan f) {
           if ("i".equals(f.attribute()) && f.value() instanceof Integer) {
             lowerBound = (Integer) f.value();
             break;


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to