This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 231d89f89ed [SPARK-46270][SQL][CORE][SS] Use java16 instanceof expressions to replace the java8 instanceof statement
231d89f89ed is described below

commit 231d89f89ede2cac6cad596f2a3b36673ad0b2f3
Author: Jiaan Geng <belie...@163.com>
AuthorDate: Wed Dec 6 10:59:39 2023 -0800

    [SPARK-46270][SQL][CORE][SS] Use java16 instanceof expressions to replace the java8 instanceof statement
    
    ### What changes were proposed in this pull request?
    This PR uses Java 16 pattern matching for `instanceof` (JEP 394, previewed since Java 14) to replace the Java 8 `instanceof`-and-cast idiom.
    For example:
    ```
    if (obj instanceof String) {
        String s = (String) obj;    // grr...
        ...
    }
    ```
    We can change it to
    ```
    if (obj instanceof String s) {
        // Let pattern matching do the work!
        ...
    }
    ```
    
    ### Why are the changes needed?
    Using [JEP 394: Pattern Matching for instanceof](https://openjdk.org/jeps/394) can bring the following benefits:
    
    1. **More concise syntax**: Pattern matching allows the desired "shape" of 
an object to be expressed concisely (the pattern), and for various statements 
and expressions to test that "shape" against their input (the matching).
    
    2. **Safer**: The motto is: "A pattern variable is in scope where it has 
definitely matched". This allows for the safe reuse of pattern variables and is 
both intuitive and familiar, since Java developers are already used to
flow-sensitive analyses.
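    
    For example (an illustrative sketch, not code from this PR), negating
    the test keeps the pattern variable out of scope on the non-matching path:
    ```
    static String describe(Object obj) {
      if (!(obj instanceof String s)) {
        return "not a string";  // `s` is not in scope on this path
      }
      // `s` is in scope here, because the match has definitely succeeded.
      return "a string of length " + s.length();
    }
    ```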
    
    3. **Avoid explicit casts**: The use of pattern matching in instanceof 
should significantly reduce the overall number of explicit casts in Java 
programs. Type test patterns are particularly useful when writing equality 
methods.
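    
    For example, a hypothetical `Point` class (a sketch for illustration,
    not code from this PR) can implement `equals` without any explicit cast:
    ```
    final class Point {
      private final int x, y;
      Point(int x, int y) { this.x = x; this.y = y; }
    
      @Override
      public boolean equals(Object o) {
        // The type test, the cast, and the field accesses collapse into one expression.
        return o instanceof Point other && x == other.x && y == other.y;
      }
    
      @Override
      public int hashCode() { return 31 * x + y; }
    }
    ```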
    
    ### Does this PR introduce _any_ user-facing change?
    'No'.
    
    ### How was this patch tested?
    GA (GitHub Actions).
    
    ### Was this patch authored or co-authored using generative AI tooling?
    'No'.
    
    Closes #44187 from beliefer/SPARK-46270.
    
    Authored-by: Jiaan Geng <belie...@163.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../apache/spark/util/kvstore/ArrayWrappers.java   | 28 ++++----
 .../spark/util/kvstore/KVStoreSerializer.java      |  4 +-
 .../apache/spark/util/kvstore/LevelDBTypeInfo.java | 12 ++--
 .../apache/spark/util/kvstore/RocksDBTypeInfo.java | 12 ++--
 .../spark/network/client/StreamInterceptor.java    |  4 +-
 .../network/client/TransportResponseHandler.java   | 21 ++----
 .../protocol/EncryptedMessageWithHeader.java       | 14 ++--
 .../spark/network/protocol/MessageWithHeader.java  |  8 +--
 .../spark/network/protocol/SslMessageEncoder.java  |  7 +-
 .../spark/network/sasl/SaslClientBootstrap.java    |  4 +-
 .../network/server/TransportChannelHandler.java    |  8 +--
 .../network/server/TransportRequestHandler.java    | 28 ++++----
 .../network/ssl/ReloadingX509TrustManager.java     |  4 +-
 .../org/apache/spark/network/ssl/SSLFactory.java   |  4 +-
 .../org/apache/spark/network/util/NettyLogger.java | 13 ++--
 .../apache/spark/network/TestManagedBuffer.java    |  4 +-
 .../spark/network/crypto/AuthIntegrationSuite.java |  4 +-
 .../apache/spark/network/shuffle/ErrorHandler.java |  4 +-
 .../network/shuffle/ExternalBlockHandler.java      | 27 +++-----
 .../shuffle/RetryingBlockTransferorSuite.java      |  8 +--
 .../network/yarn/YarnShuffleServiceMetrics.java    | 24 +++----
 .../apache/spark/util/sketch/BloomFilterImpl.java  | 16 ++---
 .../spark/util/sketch/CountMinSketchImpl.java      | 16 ++---
 .../java/org/apache/spark/util/sketch/Utils.java   | 16 ++---
 .../org/apache/spark/unsafe/types/UTF8String.java  |  6 +-
 .../org/apache/spark/io/ReadAheadInputStream.java  |  4 +-
 .../unsafe/sort/UnsafeExternalSorter.java          |  4 +-
 .../unsafe/sort/UnsafeInMemorySorter.java          |  5 +-
 .../org/apache/spark/launcher/LauncherServer.java  |  4 +-
 .../expressions/SpecializedGettersReader.java      |  8 +--
 .../sql/catalyst/expressions/UnsafeDataUtils.java  |  6 +-
 .../spark/sql/catalyst/expressions/UnsafeRow.java  | 12 ++--
 .../spark/sql/connector/read/streaming/Offset.java |  4 +-
 .../sql/connector/util/V2ExpressionSQLBuilder.java | 17 ++---
 .../spark/sql/vectorized/ArrowColumnVector.java    | 76 +++++++++++-----------
 .../spark/sql/vectorized/ColumnarBatchRow.java     |  8 +--
 .../apache/spark/sql/vectorized/ColumnarRow.java   |  4 +-
 .../datasources/orc/OrcAtomicColumnVector.java     | 20 +++---
 .../execution/datasources/orc/OrcFooterReader.java | 14 ++--
 .../parquet/ParquetVectorUpdaterFactory.java       |  9 ++-
 .../parquet/VectorizedColumnReader.java            |  7 +-
 .../execution/vectorized/ConstantColumnVector.java |  4 +-
 .../execution/vectorized/MutableColumnarRow.java   |  4 +-
 .../JavaAdvancedDataSourceV2WithV2Filter.java      |  4 +-
 .../org/apache/spark/sql/hive/test/Complex.java    |  2 +-
 .../spark/streaming/JavaWriteAheadLogSuite.java    |  4 +-
 46 files changed, 246 insertions(+), 270 deletions(-)

diff --git a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/ArrayWrappers.java b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/ArrayWrappers.java
index 6f9487322bb..5265881e990 100644
--- a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/ArrayWrappers.java
+++ b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/ArrayWrappers.java
@@ -40,12 +40,12 @@ class ArrayWrappers {
   public static Comparable<Object> forArray(Object a) {
     Preconditions.checkArgument(a.getClass().isArray());
     Comparable<?> ret;
-    if (a instanceof int[]) {
-      ret = new ComparableIntArray((int[]) a);
-    } else if (a instanceof long[]) {
-      ret = new ComparableLongArray((long[]) a);
-    } else if (a instanceof byte[]) {
-      ret = new ComparableByteArray((byte[]) a);
+    if (a instanceof int[] ia) {
+      ret = new ComparableIntArray(ia);
+    } else if (a instanceof long[] la) {
+      ret = new ComparableLongArray(la);
+    } else if (a instanceof byte[] ba) {
+      ret = new ComparableByteArray(ba);
     } else {
      Preconditions.checkArgument(!a.getClass().getComponentType().isPrimitive());
       ret = new ComparableObjectArray((Object[]) a);
@@ -63,10 +63,10 @@ class ArrayWrappers {
 
     @Override
     public boolean equals(Object other) {
-      if (!(other instanceof ComparableIntArray)) {
+      if (!(other instanceof ComparableIntArray comparableIntArray)) {
         return false;
       }
-      return Arrays.equals(array, ((ComparableIntArray) other).array);
+      return Arrays.equals(array, comparableIntArray.array);
     }
 
     @Override
@@ -102,10 +102,10 @@ class ArrayWrappers {
 
     @Override
     public boolean equals(Object other) {
-      if (!(other instanceof ComparableLongArray)) {
+      if (!(other instanceof ComparableLongArray comparableLongArray)) {
         return false;
       }
-      return Arrays.equals(array, ((ComparableLongArray) other).array);
+      return Arrays.equals(array, comparableLongArray.array);
     }
 
     @Override
@@ -141,10 +141,10 @@ class ArrayWrappers {
 
     @Override
     public boolean equals(Object other) {
-      if (!(other instanceof ComparableByteArray)) {
+      if (!(other instanceof ComparableByteArray comparableByteArray)) {
         return false;
       }
-      return Arrays.equals(array, ((ComparableByteArray) other).array);
+      return Arrays.equals(array, comparableByteArray.array);
     }
 
     @Override
@@ -180,10 +180,10 @@ class ArrayWrappers {
 
     @Override
     public boolean equals(Object other) {
-      if (!(other instanceof ComparableObjectArray)) {
+      if (!(other instanceof ComparableObjectArray comparableObjectArray)) {
         return false;
       }
-      return Arrays.equals(array, ((ComparableObjectArray) other).array);
+      return Arrays.equals(array, comparableObjectArray.array);
     }
 
     @Override
diff --git a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVStoreSerializer.java b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVStoreSerializer.java
index 02dd73e1a2f..a1a51e4f076 100644
--- a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVStoreSerializer.java
+++ b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/KVStoreSerializer.java
@@ -50,8 +50,8 @@ public class KVStoreSerializer {
   }
 
   public byte[] serialize(Object o) throws Exception {
-    if (o instanceof String) {
-      return ((String) o).getBytes(UTF_8);
+    if (o instanceof String str) {
+      return str.getBytes(UTF_8);
     } else {
       ByteArrayOutputStream bytes = new ByteArrayOutputStream();
       try (GZIPOutputStream out = new GZIPOutputStream(bytes)) {
diff --git a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/LevelDBTypeInfo.java b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/LevelDBTypeInfo.java
index 4d7f76f6738..21a412a36f3 100644
--- a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/LevelDBTypeInfo.java
+++ b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/LevelDBTypeInfo.java
@@ -459,13 +459,13 @@ class LevelDBTypeInfo {
     byte[] toKey(Object value, byte prefix) {
       final byte[] result;
 
-      if (value instanceof String) {
-        byte[] str = ((String) value).getBytes(UTF_8);
-        result = new byte[str.length + 1];
+      if (value instanceof String str) {
+        byte[] bytes = str.getBytes(UTF_8);
+        result = new byte[bytes.length + 1];
         result[0] = prefix;
-        System.arraycopy(str, 0, result, 1, str.length);
-      } else if (value instanceof Boolean) {
-        result = new byte[] { prefix, (Boolean) value ? TRUE : FALSE };
+        System.arraycopy(bytes, 0, result, 1, bytes.length);
+      } else if (value instanceof Boolean bool) {
+        result = new byte[] { prefix, bool ? TRUE : FALSE };
       } else if (value.getClass().isArray()) {
         int length = Array.getLength(value);
         byte[][] components = new byte[length][];
diff --git a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/RocksDBTypeInfo.java b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/RocksDBTypeInfo.java
index c5452b4104b..3d1ba72f94d 100644
--- a/common/kvstore/src/main/java/org/apache/spark/util/kvstore/RocksDBTypeInfo.java
+++ b/common/kvstore/src/main/java/org/apache/spark/util/kvstore/RocksDBTypeInfo.java
@@ -461,13 +461,13 @@ class RocksDBTypeInfo {
     byte[] toKey(Object value, byte prefix) {
       final byte[] result;
 
-      if (value instanceof String) {
-        byte[] str = ((String) value).getBytes(UTF_8);
-        result = new byte[str.length + 1];
+      if (value instanceof String str) {
+        byte[] bytes = str.getBytes(UTF_8);
+        result = new byte[bytes.length + 1];
         result[0] = prefix;
-        System.arraycopy(str, 0, result, 1, str.length);
-      } else if (value instanceof Boolean) {
-        result = new byte[] { prefix, (Boolean) value ? TRUE : FALSE };
+        System.arraycopy(bytes, 0, result, 1, bytes.length);
+      } else if (value instanceof Boolean bool) {
+        result = new byte[] { prefix, bool ? TRUE : FALSE };
       } else if (value.getClass().isArray()) {
         int length = Array.getLength(value);
         byte[][] components = new byte[length][];
diff --git a/common/network-common/src/main/java/org/apache/spark/network/client/StreamInterceptor.java b/common/network-common/src/main/java/org/apache/spark/network/client/StreamInterceptor.java
index f3eb744ff73..b5365d67cc9 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/client/StreamInterceptor.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/client/StreamInterceptor.java
@@ -63,10 +63,10 @@ public class StreamInterceptor<T extends Message> implements TransportFrameDecod
   }
 
   private void deactivateStream() {
-    if (handler instanceof TransportResponseHandler) {
+    if (handler instanceof TransportResponseHandler transportResponseHandler) {
      // we only have to do this for TransportResponseHandler as it exposes numOutstandingFetches
       // (there is no extra cleanup that needs to happen)
-      ((TransportResponseHandler) handler).deactivateStream();
+      transportResponseHandler.deactivateStream();
     }
   }
 
diff --git a/common/network-common/src/main/java/org/apache/spark/network/client/TransportResponseHandler.java b/common/network-common/src/main/java/org/apache/spark/network/client/TransportResponseHandler.java
index cf9af2e00c8..90416784351 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/client/TransportResponseHandler.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/client/TransportResponseHandler.java
@@ -160,8 +160,7 @@ public class TransportResponseHandler extends MessageHandler<ResponseMessage> {
 
   @Override
   public void handle(ResponseMessage message) throws Exception {
-    if (message instanceof ChunkFetchSuccess) {
-      ChunkFetchSuccess resp = (ChunkFetchSuccess) message;
+    if (message instanceof ChunkFetchSuccess resp) {
      ChunkReceivedCallback listener = outstandingFetches.get(resp.streamChunkId);
      if (listener == null) {
        logger.warn("Ignoring response for block {} from {} since it is not outstanding",
@@ -172,8 +171,7 @@ public class TransportResponseHandler extends MessageHandler<ResponseMessage> {
         listener.onSuccess(resp.streamChunkId.chunkIndex(), resp.body());
         resp.body().release();
       }
-    } else if (message instanceof ChunkFetchFailure) {
-      ChunkFetchFailure resp = (ChunkFetchFailure) message;
+    } else if (message instanceof ChunkFetchFailure resp) {
      ChunkReceivedCallback listener = outstandingFetches.get(resp.streamChunkId);
      if (listener == null) {
        logger.warn("Ignoring response for block {} from {} ({}) since it is not outstanding",
@@ -183,8 +181,7 @@ public class TransportResponseHandler extends MessageHandler<ResponseMessage> {
        listener.onFailure(resp.streamChunkId.chunkIndex(), new ChunkFetchFailureException(
          "Failure while fetching " + resp.streamChunkId + ": " + resp.errorString));
       }
-    } else if (message instanceof RpcResponse) {
-      RpcResponse resp = (RpcResponse) message;
+    } else if (message instanceof RpcResponse resp) {
      RpcResponseCallback listener = (RpcResponseCallback) outstandingRpcs.get(resp.requestId);
      if (listener == null) {
        logger.warn("Ignoring response for RPC {} from {} ({} bytes) since it is not outstanding",
@@ -198,8 +195,7 @@ public class TransportResponseHandler extends MessageHandler<ResponseMessage> {
           resp.body().release();
         }
       }
-    } else if (message instanceof RpcFailure) {
-      RpcFailure resp = (RpcFailure) message;
+    } else if (message instanceof RpcFailure resp) {
       BaseResponseCallback listener = outstandingRpcs.get(resp.requestId);
       if (listener == null) {
        logger.warn("Ignoring response for RPC {} from {} ({}) since it is not outstanding",
@@ -208,8 +204,7 @@ public class TransportResponseHandler extends MessageHandler<ResponseMessage> {
         outstandingRpcs.remove(resp.requestId);
         listener.onFailure(new RuntimeException(resp.errorString));
       }
-    } else if (message instanceof MergedBlockMetaSuccess) {
-      MergedBlockMetaSuccess resp = (MergedBlockMetaSuccess) message;
+    } else if (message instanceof MergedBlockMetaSuccess resp) {
       try {
         MergedBlockMetaResponseCallback listener =
          (MergedBlockMetaResponseCallback) outstandingRpcs.get(resp.requestId);
@@ -224,8 +219,7 @@ public class TransportResponseHandler extends MessageHandler<ResponseMessage> {
       } finally {
         resp.body().release();
       }
-    } else if (message instanceof StreamResponse) {
-      StreamResponse resp = (StreamResponse) message;
+    } else if (message instanceof StreamResponse resp) {
       Pair<String, StreamCallback> entry = streamCallbacks.poll();
       if (entry != null) {
         StreamCallback callback = entry.getValue();
@@ -251,8 +245,7 @@ public class TransportResponseHandler extends MessageHandler<ResponseMessage> {
       } else {
         logger.error("Could not find callback for StreamResponse.");
       }
-    } else if (message instanceof StreamFailure) {
-      StreamFailure resp = (StreamFailure) message;
+    } else if (message instanceof StreamFailure resp) {
       Pair<String, StreamCallback> entry = streamCallbacks.poll();
       if (entry != null) {
         StreamCallback callback = entry.getValue();
diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/EncryptedMessageWithHeader.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/EncryptedMessageWithHeader.java
index 7e7ba85ebf6..d9f83ce8bac 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/protocol/EncryptedMessageWithHeader.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/EncryptedMessageWithHeader.java
@@ -84,8 +84,7 @@ public class EncryptedMessageWithHeader implements ChunkedInput<ByteBuf> {
     if (totalBytesTransferred < headerLength) {
       totalBytesTransferred += headerLength;
       return header.retain();
-    } else if (body instanceof InputStream) {
-      InputStream stream = (InputStream) body;
+    } else if (body instanceof InputStream stream) {
       int available = stream.available();
       if (available <= 0) {
         available = (int) (length() - totalBytesTransferred);
@@ -101,8 +100,7 @@ public class EncryptedMessageWithHeader implements ChunkedInput<ByteBuf> {
       } else {
         throw new EOFException("Unable to read bytes from InputStream");
       }
-    } else if (body instanceof ChunkedStream) {
-      ChunkedStream stream = (ChunkedStream) body;
+    } else if (body instanceof ChunkedStream stream) {
       long old = stream.transferredBytes();
       ByteBuf buffer = stream.readChunk(allocator);
       long read = stream.transferredBytes() - old;
@@ -139,10 +137,10 @@ public class EncryptedMessageWithHeader implements ChunkedInput<ByteBuf> {
     if (managedBuffer != null) {
       managedBuffer.release();
     }
-    if (body instanceof InputStream) {
-      ((InputStream) body).close();
-    } else if (body instanceof ChunkedStream) {
-      ((ChunkedStream) body).close();
+    if (body instanceof InputStream stream) {
+      stream.close();
+    } else if (body instanceof ChunkedStream stream) {
+      stream.close();
     }
   }
 }
diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java
index de2c44925f6..e8eb83e7577 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/MessageWithHeader.java
@@ -118,10 +118,10 @@ public class MessageWithHeader extends AbstractFileRegion {
 
     // Bytes written for body in this call.
     long writtenBody = 0;
-    if (body instanceof FileRegion) {
-      writtenBody = ((FileRegion) body).transferTo(target, totalBytesTransferred - headerLength);
-    } else if (body instanceof ByteBuf) {
-      writtenBody = copyByteBuf((ByteBuf) body, target);
+    if (body instanceof FileRegion fileRegion) {
+      writtenBody = fileRegion.transferTo(target, totalBytesTransferred - headerLength);
+    } else if (body instanceof ByteBuf byteBuf) {
+      writtenBody = copyByteBuf(byteBuf, target);
     }
     totalBytesTransferred += writtenBody;
 
diff --git a/common/network-common/src/main/java/org/apache/spark/network/protocol/SslMessageEncoder.java b/common/network-common/src/main/java/org/apache/spark/network/protocol/SslMessageEncoder.java
index 87723c6613e..3177971a95d 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/protocol/SslMessageEncoder.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/protocol/SslMessageEncoder.java
@@ -65,8 +65,7 @@ public final class SslMessageEncoder extends MessageToMessageEncoder<Message> {
         isBodyInFrame = in.isBodyInFrame();
       } catch (Exception e) {
         in.body().release();
-        if (in instanceof AbstractResponseMessage) {
-          AbstractResponseMessage resp = (AbstractResponseMessage) in;
+        if (in instanceof AbstractResponseMessage resp) {
           // Re-encode this message as a failure response.
           String error = e.getMessage() != null ? e.getMessage() : "null";
           logger.error(String.format("Error processing %s for client %s",
@@ -92,8 +91,8 @@ public final class SslMessageEncoder extends MessageToMessageEncoder<Message> {
     assert header.writableBytes() == 0;
 
     if (body != null && bodyLength > 0) {
-      if (body instanceof ByteBuf) {
-        out.add(Unpooled.wrappedBuffer(header, (ByteBuf) body));
+      if (body instanceof ByteBuf byteBuf) {
+        out.add(Unpooled.wrappedBuffer(header, byteBuf));
      } else if (body instanceof InputStream || body instanceof ChunkedStream) {
         // For now, assume the InputStream is doing proper chunking.
        out.add(new EncryptedMessageWithHeader(in.body(), header, body, bodyLength));
diff --git a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslClientBootstrap.java b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslClientBootstrap.java
index 69baaca8a26..7f29af3688e 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslClientBootstrap.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/sasl/SaslClientBootstrap.java
@@ -72,8 +72,8 @@ public class SaslClientBootstrap implements TransportClientBootstrap {
          response = client.sendRpcSync(buf.nioBuffer(), conf.authRTTimeoutMs());
         } catch (RuntimeException ex) {
           // We know it is a Sasl timeout here if it is a TimeoutException.
-          if (ex.getCause() instanceof TimeoutException) {
-            throw new SaslTimeoutException(ex.getCause());
+          if (ex.getCause() instanceof TimeoutException te) {
+            throw new SaslTimeoutException(te);
           } else {
             throw ex;
           }
diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java b/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
index e12f9120fdb..a504e8c20a7 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/server/TransportChannelHandler.java
@@ -136,10 +136,10 @@ public class TransportChannelHandler extends SimpleChannelInboundHandler<Message
 
   @Override
  public void channelRead0(ChannelHandlerContext ctx, Message request) throws Exception {
-    if (request instanceof RequestMessage) {
-      requestHandler.handle((RequestMessage) request);
-    } else if (request instanceof ResponseMessage) {
-      responseHandler.handle((ResponseMessage) request);
+    if (request instanceof RequestMessage msg) {
+      requestHandler.handle(msg);
+    } else if (request instanceof ResponseMessage msg) {
+      responseHandler.handle(msg);
     } else {
       ctx.fireChannelRead(request);
     }
diff --git a/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java b/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java
index 81012c3ea61..c5e6da4cf6c 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/server/TransportRequestHandler.java
@@ -103,18 +103,18 @@ public class TransportRequestHandler extends MessageHandler<RequestMessage> {
 
   @Override
   public void handle(RequestMessage request) throws Exception {
-    if (request instanceof ChunkFetchRequest) {
-      chunkFetchRequestHandler.processFetchRequest(channel, (ChunkFetchRequest) request);
-    } else if (request instanceof RpcRequest) {
-      processRpcRequest((RpcRequest) request);
-    } else if (request instanceof OneWayMessage) {
-      processOneWayMessage((OneWayMessage) request);
-    } else if (request instanceof StreamRequest) {
-      processStreamRequest((StreamRequest) request);
-    } else if (request instanceof UploadStream) {
-      processStreamUpload((UploadStream) request);
-    } else if (request instanceof MergedBlockMetaRequest) {
-      processMergedBlockMetaRequest((MergedBlockMetaRequest) request);
+    if (request instanceof ChunkFetchRequest chunkFetchRequest) {
+      chunkFetchRequestHandler.processFetchRequest(channel, chunkFetchRequest);
+    } else if (request instanceof RpcRequest rpcRequest) {
+      processRpcRequest(rpcRequest);
+    } else if (request instanceof OneWayMessage oneWayMessage) {
+      processOneWayMessage(oneWayMessage);
+    } else if (request instanceof StreamRequest streamRequest) {
+      processStreamRequest(streamRequest);
+    } else if (request instanceof UploadStream uploadStream) {
+      processStreamUpload(uploadStream);
+    } else if (request instanceof MergedBlockMetaRequest mergedBlockMetaRequest) {
+      processMergedBlockMetaRequest(mergedBlockMetaRequest);
     } else {
       throw new IllegalArgumentException("Unknown request type: " + request);
     }
@@ -249,13 +249,13 @@ public class TransportRequestHandler extends MessageHandler<RequestMessage> {
         wrappedCallback.onComplete(wrappedCallback.getID());
       }
     } catch (Exception e) {
-      if (e instanceof BlockPushNonFatalFailure) {
+      if (e instanceof BlockPushNonFatalFailure blockPushNonFatalFailure) {
        // Thrown by rpcHandler.receiveStream(reverseClient, meta, callback), the same as
        // onComplete method. Respond an RPC message with the error code to client instead of
        // using exceptions encoded in the RPCFailure. Using a proper RPCResponse is more
         // efficient, and now only include the too old attempt case here.
         respond(new RpcResponse(req.requestId,
-          new NioManagedBuffer(((BlockPushNonFatalFailure) e).getResponse())));
       } else {
        logger.error("Error while invoking RpcHandler#receive() on RPC id " + req.requestId, e);
        respond(new RpcFailure(req.requestId, Throwables.getStackTraceAsString(e)));
diff --git a/common/network-common/src/main/java/org/apache/spark/network/ssl/ReloadingX509TrustManager.java b/common/network-common/src/main/java/org/apache/spark/network/ssl/ReloadingX509TrustManager.java
index 87798bda2a0..6572506f38d 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/ssl/ReloadingX509TrustManager.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/ssl/ReloadingX509TrustManager.java
@@ -190,8 +190,8 @@ public final class ReloadingX509TrustManager
     trustManagerFactory.init(ks);
     TrustManager[] trustManagers = trustManagerFactory.getTrustManagers();
     for (TrustManager trustManager1 : trustManagers) {
-      if (trustManager1 instanceof X509TrustManager) {
-        trustManager = (X509TrustManager) trustManager1;
+      if (trustManager1 instanceof X509TrustManager x509TrustManager) {
+        trustManager = x509TrustManager;
         break;
       }
     }
diff --git a/common/network-common/src/main/java/org/apache/spark/network/ssl/SSLFactory.java b/common/network-common/src/main/java/org/apache/spark/network/ssl/SSLFactory.java
index dc5af4639da..19c19ec2820 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/ssl/SSLFactory.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/ssl/SSLFactory.java
@@ -132,9 +132,9 @@ public class SSLFactory {
   public void destroy() {
     if (trustManagers != null) {
       for (int i = 0; i < trustManagers.length; i++) {
-        if (trustManagers[i] instanceof ReloadingX509TrustManager) {
+        if (trustManagers[i] instanceof ReloadingX509TrustManager manager) {
           try {
-            ((ReloadingX509TrustManager) trustManagers[i]).destroy();
+            manager.destroy();
           } catch (InterruptedException ex) {
            logger.info("Interrupted while destroying trust manager: " + ex.toString(), ex);
           }
diff --git a/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java b/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java
index f4c0df6239d..cb66784e419 100644
--- a/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java
+++ b/common/network-common/src/main/java/org/apache/spark/network/util/NettyLogger.java
@@ -40,15 +40,14 @@ public class NettyLogger {
 
     @Override
    protected String format(ChannelHandlerContext ctx, String eventName, Object arg) {
-      if (arg instanceof ByteBuf) {
-        return format(ctx, eventName) + " " + ((ByteBuf) arg).readableBytes() + "B";
-      } else if (arg instanceof ByteBufHolder) {
-        return format(ctx, eventName) + " " +
-          ((ByteBufHolder) arg).content().readableBytes() + "B";
-      } else if (arg instanceof InputStream) {
+      if (arg instanceof ByteBuf byteBuf) {
+        return format(ctx, eventName) + " " + byteBuf.readableBytes() + "B";
+      } else if (arg instanceof ByteBufHolder byteBufHolder) {
+        return format(ctx, eventName) + " " + byteBufHolder.content().readableBytes() + "B";
+      } else if (arg instanceof InputStream inputStream) {
         int available = -1;
         try {
-          available = ((InputStream) arg).available();
+          available = inputStream.available();
         } catch (IOException ex) {
           // Swallow, but return -1 to indicate an error happened
         }
diff --git a/common/network-common/src/test/java/org/apache/spark/network/TestManagedBuffer.java b/common/network-common/src/test/java/org/apache/spark/network/TestManagedBuffer.java
index 1814634fb92..d1e93e3cb58 100644
--- a/common/network-common/src/test/java/org/apache/spark/network/TestManagedBuffer.java
+++ b/common/network-common/src/test/java/org/apache/spark/network/TestManagedBuffer.java
@@ -92,9 +92,9 @@ public class TestManagedBuffer extends ManagedBuffer {
 
   @Override
   public boolean equals(Object other) {
-    if (other instanceof ManagedBuffer) {
+    if (other instanceof ManagedBuffer managedBuffer) {
       try {
-        ByteBuffer nioBuf = ((ManagedBuffer) other).nioByteBuffer();
+        ByteBuffer nioBuf = managedBuffer.nioByteBuffer();
         if (nioBuf.remaining() != len) {
           return false;
         } else {
diff --git a/common/network-common/src/test/java/org/apache/spark/network/crypto/AuthIntegrationSuite.java b/common/network-common/src/test/java/org/apache/spark/network/crypto/AuthIntegrationSuite.java
index badebe25d18..90f6c874a6c 100644
--- a/common/network-common/src/test/java/org/apache/spark/network/crypto/AuthIntegrationSuite.java
+++ b/common/network-common/src/test/java/org/apache/spark/network/crypto/AuthIntegrationSuite.java
@@ -199,8 +199,8 @@ public class AuthIntegrationSuite {
     void createServer(String secret, boolean enableAes) throws Exception {
       TransportServerBootstrap introspector = (channel, rpcHandler) -> {
         this.serverChannel = channel;
-        if (rpcHandler instanceof AuthRpcHandler) {
-          this.authRpcHandler = (AuthRpcHandler) rpcHandler;
+        if (rpcHandler instanceof AuthRpcHandler authRpcHandler) {
+          this.authRpcHandler = authRpcHandler;
         }
         return rpcHandler;
       };
diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ErrorHandler.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ErrorHandler.java
index f24de2dbfe2..31ed10ad76f 100644
--- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ErrorHandler.java
+++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ErrorHandler.java
@@ -87,9 +87,9 @@ public interface ErrorHandler {
 
      // If the block is too late or the invalid block push or the attempt is not the latest one,
       // there is no need to retry it
-      return !(t instanceof BlockPushNonFatalFailure &&
+      return !(t instanceof BlockPushNonFatalFailure blockPushNonFatalFailure &&
        BlockPushNonFatalFailure
-          .shouldNotRetryErrorCode(((BlockPushNonFatalFailure) t).getReturnCode()));
+          .shouldNotRetryErrorCode(blockPushNonFatalFailure.getReturnCode()));
     }
 
     @Override
diff --git a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java
index efcc83f409e..137572da108 100644
--- a/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java
+++ b/common/network-shuffle/src/main/java/org/apache/spark/network/shuffle/ExternalBlockHandler.java
@@ -145,13 +145,12 @@ public class ExternalBlockHandler extends RpcHandler
       try {
         int numBlockIds;
         long streamId;
-        if (msgObj instanceof AbstractFetchShuffleBlocks) {
-          AbstractFetchShuffleBlocks msg = (AbstractFetchShuffleBlocks) msgObj;
+        if (msgObj instanceof AbstractFetchShuffleBlocks msg) {
           checkAuth(client, msg.appId);
-          numBlockIds = ((AbstractFetchShuffleBlocks) msgObj).getNumBlocks();
+          numBlockIds = msg.getNumBlocks();
           Iterator<ManagedBuffer> iterator;
-          if (msgObj instanceof  FetchShuffleBlocks) {
-            iterator = new ShuffleManagedBufferIterator((FetchShuffleBlocks)msgObj);
+          if (msgObj instanceof FetchShuffleBlocks blocks) {
+            iterator = new ShuffleManagedBufferIterator(blocks);
           } else {
            iterator = new ShuffleChunkManagedBufferIterator((FetchShuffleBlockChunks) msgObj);
           }
@@ -178,11 +177,10 @@ public class ExternalBlockHandler extends RpcHandler
         responseDelayContext.stop();
       }
 
-    } else if (msgObj instanceof RegisterExecutor) {
+    } else if (msgObj instanceof RegisterExecutor msg) {
       final Timer.Context responseDelayContext =
         metrics.registerExecutorRequestLatencyMillis.time();
       try {
-        RegisterExecutor msg = (RegisterExecutor) msgObj;
         checkAuth(client, msg.appId);
         blockManager.registerExecutor(msg.appId, msg.execId, msg.executorInfo);
         mergeManager.registerExecutor(msg.appId, msg.executorInfo);
@@ -191,14 +189,12 @@ public class ExternalBlockHandler extends RpcHandler
         responseDelayContext.stop();
       }
 
-    } else if (msgObj instanceof RemoveBlocks) {
-      RemoveBlocks msg = (RemoveBlocks) msgObj;
+    } else if (msgObj instanceof RemoveBlocks msg) {
       checkAuth(client, msg.appId);
      int numRemovedBlocks = blockManager.removeBlocks(msg.appId, msg.execId, msg.blockIds);
       callback.onSuccess(new BlocksRemoved(numRemovedBlocks).toByteBuffer());
 
-    } else if (msgObj instanceof GetLocalDirsForExecutors) {
-      GetLocalDirsForExecutors msg = (GetLocalDirsForExecutors) msgObj;
+    } else if (msgObj instanceof GetLocalDirsForExecutors msg) {
       checkAuth(client, msg.appId);
       Set<String> execIdsForBlockResolver = Sets.newHashSet(msg.execIds);
      boolean fetchMergedBlockDirs = execIdsForBlockResolver.remove(SHUFFLE_MERGER_IDENTIFIER);
@@ -208,10 +204,9 @@ public class ExternalBlockHandler extends RpcHandler
        localDirs.put(SHUFFLE_MERGER_IDENTIFIER, mergeManager.getMergedBlockDirs(msg.appId));
       }
       callback.onSuccess(new LocalDirsForExecutors(localDirs).toByteBuffer());
-    } else if (msgObj instanceof FinalizeShuffleMerge) {
+    } else if (msgObj instanceof FinalizeShuffleMerge msg) {
       final Timer.Context responseDelayContext =
           metrics.finalizeShuffleMergeLatencyMillis.time();
-      FinalizeShuffleMerge msg = (FinalizeShuffleMerge) msgObj;
       try {
         checkAuth(client, msg.appId);
         MergeStatuses statuses = mergeManager.finalizeShuffleMerge(msg);
@@ -223,14 +218,12 @@ public class ExternalBlockHandler extends RpcHandler
       } finally {
         responseDelayContext.stop();
       }
-    } else if (msgObj instanceof RemoveShuffleMerge) {
-      RemoveShuffleMerge msg = (RemoveShuffleMerge) msgObj;
+    } else if (msgObj instanceof RemoveShuffleMerge msg) {
       checkAuth(client, msg.appId);
      logger.info("Removing shuffle merge data for application {} shuffle {} shuffleMerge {}",
           msg.appId, msg.shuffleId, msg.shuffleMergeId);
       mergeManager.removeShuffleMerge(msg);
-    } else if (msgObj instanceof DiagnoseCorruption) {
-      DiagnoseCorruption msg = (DiagnoseCorruption) msgObj;
+    } else if (msgObj instanceof DiagnoseCorruption msg) {
       checkAuth(client, msg.appId);
       Cause cause = blockManager.diagnoseShuffleBlockCorruption(
        msg.appId, msg.execId, msg.shuffleId, msg.mapId, msg.reduceId, msg.checksum, msg.algorithm);
diff --git a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/RetryingBlockTransferorSuite.java b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/RetryingBlockTransferorSuite.java
index 240ab4ae24d..3725973ae73 100644
--- a/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/RetryingBlockTransferorSuite.java
+++ b/common/network-shuffle/src/test/java/org/apache/spark/network/shuffle/RetryingBlockTransferorSuite.java
@@ -444,10 +444,10 @@ public class RetryingBlockTransferorSuite {
             String blockId = block.getKey();
             Object blockValue = block.getValue();
 
-            if (blockValue instanceof ManagedBuffer) {
-              retryListener.onBlockFetchSuccess(blockId, (ManagedBuffer) blockValue);
-            } else if (blockValue instanceof Exception) {
-              retryListener.onBlockFetchFailure(blockId, (Exception) blockValue);
+            if (blockValue instanceof ManagedBuffer managedBuffer) {
+              retryListener.onBlockFetchSuccess(blockId, managedBuffer);
+            } else if (blockValue instanceof Exception exception) {
+              retryListener.onBlockFetchFailure(blockId, exception);
             } else {
              fail("Can only handle ManagedBuffers and Exceptions, got " + blockValue);
             }
diff --git a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java
index 21591d9f2f6..891207d2707 100644
--- a/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java
+++ b/common/network-yarn/src/main/java/org/apache/spark/network/yarn/YarnShuffleServiceMetrics.java
@@ -114,20 +114,16 @@ class YarnShuffleServiceMetrics implements MetricsSource {
           m.getOneMinuteRate())
        .addGauge(new ShuffleServiceMetricsInfo(name + "_rateMean", "Mean rate of meter " + name),
           m.getMeanRate());
-    } else if (metric instanceof Gauge) {
-      final Object gaugeValue = ((Gauge) metric).getValue();
-      if (gaugeValue instanceof Integer) {
-        metricsRecordBuilder.addGauge(
-          getShuffleServiceMetricsInfoForGauge(name), (Integer) gaugeValue);
-      } else if (gaugeValue instanceof Long) {
-        metricsRecordBuilder.addGauge(
-          getShuffleServiceMetricsInfoForGauge(name), (Long) gaugeValue);
-      } else if (gaugeValue instanceof Float) {
-        metricsRecordBuilder.addGauge(
-          getShuffleServiceMetricsInfoForGauge(name), (Float) gaugeValue);
-      } else if (gaugeValue instanceof Double) {
-        metricsRecordBuilder.addGauge(
-          getShuffleServiceMetricsInfoForGauge(name), (Double) gaugeValue);
+    } else if (metric instanceof Gauge gauge) {
+      final Object gaugeValue = gauge.getValue();
+      if (gaugeValue instanceof Integer integer) {
+        metricsRecordBuilder.addGauge(getShuffleServiceMetricsInfoForGauge(name), integer);
+      } else if (gaugeValue instanceof Long longVal) {
+        metricsRecordBuilder.addGauge(getShuffleServiceMetricsInfoForGauge(name), longVal);
+      } else if (gaugeValue instanceof Float floatVal) {
+        metricsRecordBuilder.addGauge(getShuffleServiceMetricsInfoForGauge(name), floatVal);
+      } else if (gaugeValue instanceof Double doubleVal) {
+        metricsRecordBuilder.addGauge(getShuffleServiceMetricsInfoForGauge(name), doubleVal);
       } else {
        throw new IllegalStateException(
                "Not supported class type of metric[" + name + "] for value " + gaugeValue);
diff --git a/common/sketch/src/main/java/org/apache/spark/util/sketch/BloomFilterImpl.java b/common/sketch/src/main/java/org/apache/spark/util/sketch/BloomFilterImpl.java
index 18ac5a2e429..3bd04a531fe 100644
--- a/common/sketch/src/main/java/org/apache/spark/util/sketch/BloomFilterImpl.java
+++ b/common/sketch/src/main/java/org/apache/spark/util/sketch/BloomFilterImpl.java
@@ -66,10 +66,10 @@ class BloomFilterImpl extends BloomFilter implements Serializable {
 
   @Override
   public boolean put(Object item) {
-    if (item instanceof String) {
-      return putString((String) item);
-    } else if (item instanceof byte[]) {
-      return putBinary((byte[]) item);
+    if (item instanceof String str) {
+      return putString(str);
+    } else if (item instanceof byte[] bytes) {
+      return putBinary(bytes);
     } else {
       return putLong(Utils.integralToLong(item));
     }
@@ -166,10 +166,10 @@ class BloomFilterImpl extends BloomFilter implements Serializable {
 
   @Override
   public boolean mightContain(Object item) {
-    if (item instanceof String) {
-      return mightContainString((String) item);
-    } else if (item instanceof byte[]) {
-      return mightContainBinary((byte[]) item);
+    if (item instanceof String str) {
+      return mightContainString(str);
+    } else if (item instanceof byte[] bytes) {
+      return mightContainBinary(bytes);
     } else {
       return mightContainLong(Utils.integralToLong(item));
     }
diff --git a/common/sketch/src/main/java/org/apache/spark/util/sketch/CountMinSketchImpl.java b/common/sketch/src/main/java/org/apache/spark/util/sketch/CountMinSketchImpl.java
index 220e8519553..def6ab57c5a 100644
--- a/common/sketch/src/main/java/org/apache/spark/util/sketch/CountMinSketchImpl.java
+++ b/common/sketch/src/main/java/org/apache/spark/util/sketch/CountMinSketchImpl.java
@@ -141,10 +141,10 @@ class CountMinSketchImpl extends CountMinSketch implements Serializable {
 
   @Override
   public void add(Object item, long count) {
-    if (item instanceof String) {
-      addString((String) item, count);
-    } else if (item instanceof byte[]) {
-      addBinary((byte[]) item, count);
+    if (item instanceof String str) {
+      addString(str, count);
+    } else if (item instanceof byte[] bytes) {
+      addBinary(bytes, count);
     } else {
       addLong(Utils.integralToLong(item), count);
     }
@@ -225,10 +225,10 @@ class CountMinSketchImpl extends CountMinSketch implements Serializable {
 
   @Override
   public long estimateCount(Object item) {
-    if (item instanceof String) {
-      return estimateCountForStringItem((String) item);
-    } else if (item instanceof byte[]) {
-      return estimateCountForBinaryItem((byte[]) item);
+    if (item instanceof String str) {
+      return estimateCountForStringItem(str);
+    } else if (item instanceof byte[] bytes) {
+      return estimateCountForBinaryItem(bytes);
     } else {
       return estimateCountForLongItem(Utils.integralToLong(item));
     }
diff --git a/common/sketch/src/main/java/org/apache/spark/util/sketch/Utils.java b/common/sketch/src/main/java/org/apache/spark/util/sketch/Utils.java
index 81461f03000..326e27b98c3 100644
--- a/common/sketch/src/main/java/org/apache/spark/util/sketch/Utils.java
+++ b/common/sketch/src/main/java/org/apache/spark/util/sketch/Utils.java
@@ -27,14 +27,14 @@ class Utils {
   public static long integralToLong(Object i) {
     long longValue;
 
-    if (i instanceof Long) {
-      longValue = (Long) i;
-    } else if (i instanceof Integer) {
-      longValue = ((Integer) i).longValue();
-    } else if (i instanceof Short) {
-      longValue = ((Short) i).longValue();
-    } else if (i instanceof Byte) {
-      longValue = ((Byte) i).longValue();
+    if (i instanceof Long longVal) {
+      longValue = longVal;
+    } else if (i instanceof Integer integer) {
+      longValue = integer.longValue();
+    } else if (i instanceof Short shortVal) {
+      longValue = shortVal.longValue();
+    } else if (i instanceof Byte byteVal) {
+      longValue = byteVal.longValue();
     } else {
      throw new IllegalArgumentException("Unsupported data type " + i.getClass().getName());
     }
diff --git a/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java b/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
index a2dd03ff18b..21c31c954ba 100644
--- a/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
+++ b/common/unsafe/src/main/java/org/apache/spark/unsafe/types/UTF8String.java
@@ -258,9 +258,9 @@ public final class UTF8String implements Comparable<UTF8String>, Externalizable,
    */
   public byte[] getBytes() {
     // avoid copy if `base` is `byte[]`
-    if (offset == BYTE_ARRAY_OFFSET && base instanceof byte[]
-      && ((byte[]) base).length == numBytes) {
-      return (byte[]) base;
+    if (offset == BYTE_ARRAY_OFFSET && base instanceof byte[] bytes
+      && bytes.length == numBytes) {
+      return bytes;
     } else {
       byte[] bytes = new byte[numBytes];
       copyMemory(base, offset, bytes, BYTE_ARRAY_OFFSET, numBytes);
diff --git a/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java b/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java
index 011fecb3156..1b76aae8dd2 100644
--- a/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java
+++ b/core/src/main/java/org/apache/spark/io/ReadAheadInputStream.java
@@ -169,10 +169,10 @@ public class ReadAheadInputStream extends InputStream {
         } while (len > 0 && !isWaiting.get());
       } catch (Throwable ex) {
         exception = ex;
-        if (ex instanceof Error) {
+        if (ex instanceof Error error) {
          // `readException` may not be reported to the user. Rethrow Error to make sure at least
           // The user can see Error in UncaughtExceptionHandler.
-          throw (Error) ex;
+          throw error;
         }
       } finally {
         stateChangeLock.lock();
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
index d442b0ef0ef..2f9e1a9f454 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeExternalSorter.java
@@ -862,8 +862,8 @@ public final class UnsafeExternalSorter extends MemoryConsumer {
     }
 
     private void closeIfPossible(UnsafeSorterIterator iterator) {
-      if (iterator instanceof Closeable) {
-        IOUtils.closeQuietly(((Closeable) iterator));
+      if (iterator instanceof Closeable closeable) {
+        IOUtils.closeQuietly((closeable));
       }
     }
   }
diff --git a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
index 4de5440cc15..7579c0aefb2 100644
--- a/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
+++ b/core/src/main/java/org/apache/spark/util/collection/unsafe/sort/UnsafeInMemorySorter.java
@@ -140,8 +140,9 @@ public final class UnsafeInMemorySorter {
     this.initialSize = array.size();
     if (recordComparator != null) {
      this.sortComparator = new SortComparator(recordComparator, prefixComparator, memoryManager);
-      if (canUseRadixSort && prefixComparator instanceof PrefixComparators.RadixSortSupport) {
-        this.radixSortSupport = (PrefixComparators.RadixSortSupport)prefixComparator;
+      if (canUseRadixSort &&
+        prefixComparator instanceof PrefixComparators.RadixSortSupport radixSortSupport) {
+        this.radixSortSupport = radixSortSupport;
       } else {
         this.radixSortSupport = null;
       }
diff --git a/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java b/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java
index be7c0864c2f..0e0b7949e8d 100644
--- a/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java
+++ b/launcher/src/main/java/org/apache/spark/launcher/LauncherServer.java
@@ -335,8 +335,8 @@ class LauncherServer implements Closeable {
           }
           if (msg instanceof SetAppId set) {
             handle.setAppId(set.appId);
-          } else if (msg instanceof SetState) {
-            handle.setState(((SetState)msg).state);
+          } else if (msg instanceof SetState setState) {
+            handle.setState(setState.state);
           } else {
            throw new IllegalArgumentException("Invalid message: " + msgClassName);
           }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/SpecializedGettersReader.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/SpecializedGettersReader.java
index 9e508dbb271..8e609c6aa29 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/SpecializedGettersReader.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/SpecializedGettersReader.java
@@ -69,8 +69,8 @@ public final class SpecializedGettersReader {
     if (physicalDataType instanceof PhysicalVariantType) {
       return obj.getVariant(ordinal);
     }
-    if (physicalDataType instanceof PhysicalStructType) {
-      return obj.getStruct(ordinal, ((PhysicalStructType) physicalDataType).fields().length);
+    if (physicalDataType instanceof PhysicalStructType dt) {
+      return obj.getStruct(ordinal, dt.fields().length);
     }
     if (physicalDataType instanceof PhysicalArrayType) {
       return obj.getArray(ordinal);
@@ -78,8 +78,8 @@ public final class SpecializedGettersReader {
     if (physicalDataType instanceof PhysicalMapType) {
       return obj.getMap(ordinal);
     }
-    if (handleUserDefinedType && dataType instanceof UserDefinedType) {
-      return obj.get(ordinal, ((UserDefinedType)dataType).sqlType());
+    if (handleUserDefinedType && dataType instanceof UserDefinedType dt) {
+      return obj.get(ordinal, dt.sqlType());
     }
 
    throw new UnsupportedOperationException("Unsupported data type " + dataType.simpleString());
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeDataUtils.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeDataUtils.java
index 9b600192ac2..c474e353199 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeDataUtils.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeDataUtils.java
@@ -27,10 +27,10 @@ final class UnsafeDataUtils {
   }
 
  public static byte[] getBytes(Object baseObject, long baseOffset, int sizeInBytes) {
-    if (baseObject instanceof byte[]
+    if (baseObject instanceof byte[] bytes
       && baseOffset == Platform.BYTE_ARRAY_OFFSET
-      && (((byte[]) baseObject).length == sizeInBytes)) {
-      return (byte[]) baseObject;
+      && (bytes.length == sizeInBytes)) {
+      return bytes;
     }
     byte[] bytes = new byte[sizeInBytes];
    Platform.copyMemory(baseObject, baseOffset, bytes, Platform.BYTE_ARRAY_OFFSET,
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
index fca45c58bee..7345e47b93d 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/catalyst/expressions/UnsafeRow.java
@@ -71,8 +71,8 @@ public final class UnsafeRow extends InternalRow implements Externalizable, Kryo
   * Field types that hold fixed-length, store the value directly in an 8-byte word
    */
   public static boolean isFixedLength(DataType dt) {
-    if (dt instanceof UserDefinedType) {
-      return isFixedLength(((UserDefinedType<?>) dt).sqlType());
+    if (dt instanceof UserDefinedType udt) {
+      return isFixedLength(udt.sqlType());
     }
     PhysicalDataType pdt = PhysicalDataType.apply(dt);
     if (pdt instanceof PhysicalDecimalType) {
@@ -86,8 +86,8 @@ public final class UnsafeRow extends InternalRow implements Externalizable, Kryo
   * Field types that can be updated in place in UnsafeRows (e.g. we support set() for these types)
    */
   public static boolean isMutable(DataType dt) {
-    if (dt instanceof UserDefinedType) {
-      return isMutable(((UserDefinedType<?>) dt).sqlType());
+    if (dt instanceof UserDefinedType udt) {
+      return isMutable(udt.sqlType());
     }
     PhysicalDataType pdt = PhysicalDataType.apply(dt);
    return pdt instanceof PhysicalPrimitiveType || pdt instanceof PhysicalDecimalType ||
@@ -521,9 +521,9 @@ public final class UnsafeRow extends InternalRow implements Externalizable, Kryo
    *                    buffer will not be used and may be null.
    */
  public void writeToStream(OutputStream out, byte[] writeBuffer) throws IOException {
-    if (baseObject instanceof byte[]) {
+    if (baseObject instanceof byte[] bytes) {
       int offsetInByteArray = (int) (baseOffset - Platform.BYTE_ARRAY_OFFSET);
-      out.write((byte[]) baseObject, offsetInByteArray, sizeInBytes);
+      out.write(bytes, offsetInByteArray, sizeInBytes);
     } else {
       int dataRemaining = sizeInBytes;
       long rowReadPosition = baseOffset;
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/streaming/Offset.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/streaming/Offset.java
index 818f378624e..8280e81aaa1 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/streaming/Offset.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/read/streaming/Offset.java
@@ -49,8 +49,8 @@ public abstract class Offset {
      */
     @Override
     public boolean equals(Object obj) {
-        if (obj instanceof Offset) {
-            return this.json().equals(((Offset) obj).json());
+        if (obj instanceof Offset offset) {
+            return this.json().equals(offset.json());
         } else {
             return false;
         }
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/util/V2ExpressionSQLBuilder.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/util/V2ExpressionSQLBuilder.java
index 506b2c8782e..30ca02a36f1 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/util/V2ExpressionSQLBuilder.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/util/V2ExpressionSQLBuilder.java
@@ -78,10 +78,10 @@ public class V2ExpressionSQLBuilder {
   }
 
   public String build(Expression expr) {
-    if (expr instanceof Literal) {
-      return visitLiteral((Literal<?>) expr);
-    } else if (expr instanceof NamedReference) {
-      return visitNamedReference((NamedReference) expr);
+    if (expr instanceof Literal literal) {
+      return visitLiteral(literal);
+    } else if (expr instanceof NamedReference namedReference) {
+      return visitNamedReference(namedReference);
     } else if (expr instanceof Cast cast) {
       return visitCast(build(cast.expression()), cast.dataType());
     } else if (expr instanceof Extract extract) {
@@ -223,16 +223,13 @@ public class V2ExpressionSQLBuilder {
     } else if (expr instanceof Avg avg) {
       return visitAggregateFunction("AVG", avg.isDistinct(),
         expressionsToStringArray(avg.children()));
-    } else if (expr instanceof GeneralAggregateFunc) {
-      GeneralAggregateFunc f = (GeneralAggregateFunc) expr;
+    } else if (expr instanceof GeneralAggregateFunc f) {
       return visitAggregateFunction(f.name(), f.isDistinct(),
         expressionsToStringArray(f.children()));
-    } else if (expr instanceof UserDefinedScalarFunc) {
-      UserDefinedScalarFunc f = (UserDefinedScalarFunc) expr;
+    } else if (expr instanceof UserDefinedScalarFunc f) {
       return visitUserDefinedScalarFunction(f.name(), f.canonicalName(),
         expressionsToStringArray(f.children()));
-    } else if (expr instanceof UserDefinedAggregateFunc) {
-      UserDefinedAggregateFunc f = (UserDefinedAggregateFunc) expr;
+    } else if (expr instanceof UserDefinedAggregateFunc f) {
+      return visitUserDefinedAggregateFunction(f.name(), f.canonicalName(), f.isDistinct(),
         expressionsToStringArray(f.children()));
     } else {
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java
index 31ecf5cbe17..712c689d692 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ArrowColumnVector.java
@@ -151,36 +151,36 @@ public class ArrowColumnVector extends ColumnVector {
   }
 
   void initAccessor(ValueVector vector) {
-    if (vector instanceof BitVector) {
-      accessor = new BooleanAccessor((BitVector) vector);
-    } else if (vector instanceof TinyIntVector) {
-      accessor = new ByteAccessor((TinyIntVector) vector);
-    } else if (vector instanceof SmallIntVector) {
-      accessor = new ShortAccessor((SmallIntVector) vector);
-    } else if (vector instanceof IntVector) {
-      accessor = new IntAccessor((IntVector) vector);
-    } else if (vector instanceof BigIntVector) {
-      accessor = new LongAccessor((BigIntVector) vector);
-    } else if (vector instanceof Float4Vector) {
-      accessor = new FloatAccessor((Float4Vector) vector);
-    } else if (vector instanceof Float8Vector) {
-      accessor = new DoubleAccessor((Float8Vector) vector);
-    } else if (vector instanceof DecimalVector) {
-      accessor = new DecimalAccessor((DecimalVector) vector);
-    } else if (vector instanceof VarCharVector) {
-      accessor = new StringAccessor((VarCharVector) vector);
-    } else if (vector instanceof LargeVarCharVector) {
-      accessor = new LargeStringAccessor((LargeVarCharVector) vector);
-    } else if (vector instanceof VarBinaryVector) {
-      accessor = new BinaryAccessor((VarBinaryVector) vector);
-    } else if (vector instanceof LargeVarBinaryVector) {
-      accessor = new LargeBinaryAccessor((LargeVarBinaryVector) vector);
-    } else if (vector instanceof DateDayVector) {
-      accessor = new DateAccessor((DateDayVector) vector);
-    } else if (vector instanceof TimeStampMicroTZVector) {
-      accessor = new TimestampAccessor((TimeStampMicroTZVector) vector);
-    } else if (vector instanceof TimeStampMicroVector) {
-      accessor = new TimestampNTZAccessor((TimeStampMicroVector) vector);
+    if (vector instanceof BitVector bitVector) {
+      accessor = new BooleanAccessor(bitVector);
+    } else if (vector instanceof TinyIntVector tinyIntVector) {
+      accessor = new ByteAccessor(tinyIntVector);
+    } else if (vector instanceof SmallIntVector smallIntVector) {
+      accessor = new ShortAccessor(smallIntVector);
+    } else if (vector instanceof IntVector intVector) {
+      accessor = new IntAccessor(intVector);
+    } else if (vector instanceof BigIntVector bigIntVector) {
+      accessor = new LongAccessor(bigIntVector);
+    } else if (vector instanceof Float4Vector float4Vector) {
+      accessor = new FloatAccessor(float4Vector);
+    } else if (vector instanceof Float8Vector float8Vector) {
+      accessor = new DoubleAccessor(float8Vector);
+    } else if (vector instanceof DecimalVector decimalVector) {
+      accessor = new DecimalAccessor(decimalVector);
+    } else if (vector instanceof VarCharVector varCharVector) {
+      accessor = new StringAccessor(varCharVector);
+    } else if (vector instanceof LargeVarCharVector largeVarCharVector) {
+      accessor = new LargeStringAccessor(largeVarCharVector);
+    } else if (vector instanceof VarBinaryVector varBinaryVector) {
+      accessor = new BinaryAccessor(varBinaryVector);
+    } else if (vector instanceof LargeVarBinaryVector largeVarBinaryVector) {
+      accessor = new LargeBinaryAccessor(largeVarBinaryVector);
+    } else if (vector instanceof DateDayVector dateDayVector) {
+      accessor = new DateAccessor(dateDayVector);
+    } else if (vector instanceof TimeStampMicroTZVector timeStampMicroTZVector) {
+      accessor = new TimestampAccessor(timeStampMicroTZVector);
+    } else if (vector instanceof TimeStampMicroVector timeStampMicroVector) {
+      accessor = new TimestampNTZAccessor(timeStampMicroVector);
     } else if (vector instanceof MapVector mapVector) {
       accessor = new MapAccessor(mapVector);
     } else if (vector instanceof ListVector listVector) {
@@ -192,14 +192,14 @@ public class ArrowColumnVector extends ColumnVector {
       for (int i = 0; i < childColumns.length; ++i) {
         childColumns[i] = new ArrowColumnVector(structVector.getVectorById(i));
       }
-    } else if (vector instanceof NullVector) {
-      accessor = new NullAccessor((NullVector) vector);
-    } else if (vector instanceof IntervalYearVector) {
-      accessor = new IntervalYearAccessor((IntervalYearVector) vector);
-    } else if (vector instanceof DurationVector) {
-      accessor = new DurationAccessor((DurationVector) vector);
-    } else if (vector instanceof IntervalMonthDayNanoVector) {
-      accessor = new IntervalMonthDayNanoAccessor((IntervalMonthDayNanoVector) vector);
+    } else if (vector instanceof NullVector nullVector) {
+      accessor = new NullAccessor(nullVector);
+    } else if (vector instanceof IntervalYearVector intervalYearVector) {
+      accessor = new IntervalYearAccessor(intervalYearVector);
+    } else if (vector instanceof DurationVector durationVector) {
+      accessor = new DurationAccessor(durationVector);
+    } else if (vector instanceof IntervalMonthDayNanoVector intervalMonthDayNanoVector) {
+      accessor = new IntervalMonthDayNanoAccessor(intervalMonthDayNanoVector);
     } else {
       throw new UnsupportedOperationException();
     }
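
The long ArrowColumnVector chain leans on flow scoping: a pattern variable is only in scope in the branch where its match is definite, so branches may even reuse the same name. A minimal sketch with hypothetical vector types (not the Arrow classes):

```
// Hypothetical stand-ins for the Arrow vector hierarchy above.
interface Vec {}
record IntVec(int[] data) implements Vec {}
record LongVec(long[] data) implements Vec {}

public class DispatchDemo {
  // Each pattern variable v is scoped to its own branch only, which is
  // why the long else-if chain above needs no casts.
  static int length(Vec vector) {
    if (vector instanceof IntVec v) {
      return v.data().length;
    } else if (vector instanceof LongVec v) {
      return v.data().length;
    } else {
      throw new UnsupportedOperationException();
    }
  }

  public static void main(String[] args) {
    System.out.println(length(new IntVec(new int[] {1, 2, 3}))); // prints: 3
  }
}
```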
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
index ac23f70584e..affdb0797ed 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarBatchRow.java
@@ -71,8 +71,8 @@ public final class ColumnarBatchRow extends InternalRow {
           row.update(i, getBinary(i));
         } else if (pdt instanceof PhysicalDecimalType t) {
          row.setDecimal(i, getDecimal(i, t.precision(), t.scale()), t.precision());
-        } else if (pdt instanceof PhysicalStructType) {
-          row.update(i, getStruct(i, ((PhysicalStructType) pdt).fields().length).copy());
+        } else if (pdt instanceof PhysicalStructType t) {
+          row.update(i, getStruct(i, t.fields().length).copy());
         } else if (pdt instanceof PhysicalArrayType) {
           row.update(i, getArray(i).copy());
         } else if (pdt instanceof PhysicalMapType) {
@@ -184,8 +184,8 @@ public final class ColumnarBatchRow extends InternalRow {
       return getLong(ordinal);
     } else if (dataType instanceof ArrayType) {
       return getArray(ordinal);
-    } else if (dataType instanceof StructType) {
-      return getStruct(ordinal, ((StructType)dataType).fields().length);
+    } else if (dataType instanceof StructType structType) {
+      return getStruct(ordinal, structType.fields().length);
     } else if (dataType instanceof MapType) {
       return getMap(ordinal);
     } else if (dataType instanceof VariantType) {
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java
index 18f6779cccb..23df4b2e196 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/vectorized/ColumnarRow.java
@@ -78,8 +78,8 @@ public final class ColumnarRow extends InternalRow {
           row.update(i, getBinary(i));
         } else if (pdt instanceof PhysicalDecimalType t) {
          row.setDecimal(i, getDecimal(i, t.precision(), t.scale()), t.precision());
-        } else if (pdt instanceof PhysicalStructType) {
-          row.update(i, getStruct(i, ((PhysicalStructType) pdt).fields().length).copy());
+        } else if (pdt instanceof PhysicalStructType t) {
+          row.update(i, getStruct(i, t.fields().length).copy());
         } else if (pdt instanceof PhysicalArrayType) {
           row.update(i, getArray(i).copy());
         } else if (pdt instanceof PhysicalMapType) {
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcAtomicColumnVector.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcAtomicColumnVector.java
index c2d8334d928..f120482f63f 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcAtomicColumnVector.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcAtomicColumnVector.java
@@ -60,16 +60,16 @@ public class OrcAtomicColumnVector extends OrcColumnVector {
       isDate = false;
     }
 
-    if (vector instanceof LongColumnVector) {
-      longData = (LongColumnVector) vector;
-    } else if (vector instanceof DoubleColumnVector) {
-      doubleData = (DoubleColumnVector) vector;
-    } else if (vector instanceof BytesColumnVector) {
-      bytesData = (BytesColumnVector) vector;
-    } else if (vector instanceof DecimalColumnVector) {
-      decimalData = (DecimalColumnVector) vector;
-    } else if (vector instanceof TimestampColumnVector) {
-      timestampData = (TimestampColumnVector) vector;
+    if (vector instanceof LongColumnVector longColumnVector) {
+      longData = longColumnVector;
+    } else if (vector instanceof DoubleColumnVector doubleColumnVector) {
+      doubleData = doubleColumnVector;
+    } else if (vector instanceof BytesColumnVector bytesColumnVector) {
+      bytesData = bytesColumnVector;
+    } else if (vector instanceof DecimalColumnVector decimalColumnVector) {
+      decimalData = decimalColumnVector;
+    } else if (vector instanceof TimestampColumnVector timestampColumnVector) {
+      timestampData = timestampColumnVector;
     } else {
       throw new UnsupportedOperationException();
     }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcFooterReader.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcFooterReader.java
index 546b0486488..3de333f256a 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcFooterReader.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/orc/OrcFooterReader.java
@@ -52,15 +52,15 @@ public class OrcFooterReader {
   private static OrcColumnStatistics convertStatistics(
       DataType sparkSchema, Queue<ColumnStatistics> orcStatistics) {
    OrcColumnStatistics statistics = new OrcColumnStatistics(orcStatistics.remove());
-    if (sparkSchema instanceof StructType) {
-      for (StructField field : ((StructType) sparkSchema).fields()) {
+    if (sparkSchema instanceof StructType structType) {
+      for (StructField field : structType.fields()) {
         statistics.add(convertStatistics(field.dataType(), orcStatistics));
       }
-    } else if (sparkSchema instanceof MapType) {
-      statistics.add(convertStatistics(((MapType) sparkSchema).keyType(), orcStatistics));
-      statistics.add(convertStatistics(((MapType) sparkSchema).valueType(), orcStatistics));
-    } else if (sparkSchema instanceof ArrayType) {
-      statistics.add(convertStatistics(((ArrayType) sparkSchema).elementType(), orcStatistics));
+    } else if (sparkSchema instanceof MapType mapType) {
+      statistics.add(convertStatistics(mapType.keyType(), orcStatistics));
+      statistics.add(convertStatistics(mapType.valueType(), orcStatistics));
+    } else if (sparkSchema instanceof ArrayType arrayType) {
+      statistics.add(convertStatistics(arrayType.elementType(), orcStatistics));
     }
     return statistics;
   }
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorUpdaterFactory.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorUpdaterFactory.java
index 26bef0fe3a6..54b96bfb873 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorUpdaterFactory.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/ParquetVectorUpdaterFactory.java
@@ -192,8 +192,8 @@ public class ParquetVectorUpdaterFactory {
   }
 
   boolean isTimestampTypeMatched(LogicalTypeAnnotation.TimeUnit unit) {
-    return logicalTypeAnnotation instanceof TimestampLogicalTypeAnnotation &&
-      ((TimestampLogicalTypeAnnotation) logicalTypeAnnotation).getUnit() == unit;
+    return logicalTypeAnnotation instanceof TimestampLogicalTypeAnnotation annotation &&
+      annotation.getUnit() == unit;
   }
 
   void validateTimestampType(DataType sparkType) {
@@ -212,9 +212,8 @@ public class ParquetVectorUpdaterFactory {
   }
 
   boolean isUnsignedIntTypeMatched(int bitWidth) {
-    return logicalTypeAnnotation instanceof IntLogicalTypeAnnotation &&
-      !((IntLogicalTypeAnnotation) logicalTypeAnnotation).isSigned() &&
-      ((IntLogicalTypeAnnotation) logicalTypeAnnotation).getBitWidth() == bitWidth;
+    return logicalTypeAnnotation instanceof IntLogicalTypeAnnotation annotation &&
+      !annotation.isSigned() && annotation.getBitWidth() == bitWidth;
   }
 
   private static class BooleanUpdater implements ParquetVectorUpdater {
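
isTimestampTypeMatched and isUnsignedIntTypeMatched bind the pattern variable on the right-hand side of &&, which works because that operand only evaluates after the instanceof test has succeeded. A standalone sketch with hypothetical annotation types:

```
// Hypothetical annotation types; stand-ins for the Parquet logical types above.
interface TypeAnnotation {}
record IntAnnotation(int bitWidth, boolean signed) implements TypeAnnotation {}

public class CompoundConditionDemo {
  // The pattern variable `a` may be used to the right of && because the
  // right operand only runs once the instanceof test has matched.
  static boolean isUnsignedIntTypeMatched(TypeAnnotation t, int bitWidth) {
    return t instanceof IntAnnotation a && !a.signed() && a.bitWidth() == bitWidth;
  }

  public static void main(String[] args) {
    System.out.println(isUnsignedIntTypeMatched(new IntAnnotation(32, false), 32)); // true
    System.out.println(isUnsignedIntTypeMatched(new IntAnnotation(32, true), 32));  // false
  }
}
```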
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java
index 64178fdd72d..f185b251ed9 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/datasources/parquet/VectorizedColumnReader.java
@@ -222,9 +222,10 @@ public class VectorizedColumnReader {
          // WritableColumnVector will throw an exception when trying to decode to an Int when the
          // dictionary is in fact initialized as Long
          LogicalTypeAnnotation typeAnnotation = primitiveType.getLogicalTypeAnnotation();
-          boolean castLongToInt = typeAnnotation instanceof DecimalLogicalTypeAnnotation &&
-            ((DecimalLogicalTypeAnnotation) typeAnnotation).getPrecision() <=
-            Decimal.MAX_INT_DIGITS() && primitiveType.getPrimitiveTypeName() == INT64;
+          boolean castLongToInt =
+            typeAnnotation instanceof DecimalLogicalTypeAnnotation annotation &&
+            annotation.getPrecision() <= Decimal.MAX_INT_DIGITS() &&
+            primitiveType.getPrimitiveTypeName() == INT64;
 
          // We require a long value, but we need to use dictionary to decode the original
           // signed int first
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ConstantColumnVector.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ConstantColumnVector.java
index 43854c2300f..8b24973ad3d 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ConstantColumnVector.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/ConstantColumnVector.java
@@ -60,8 +60,8 @@ public class ConstantColumnVector extends ColumnVector {
     super(type);
     this.numRows = numRows;
 
-    if (type instanceof StructType) {
-      this.childData = new ConstantColumnVector[((StructType) type).fields().length];
+    if (type instanceof StructType structType) {
+      this.childData = new ConstantColumnVector[structType.fields().length];
     } else if (type instanceof CalendarIntervalType) {
       // Three columns. Months as int. Days as Int. Microseconds as Long.
       this.childData = new ConstantColumnVector[3];
diff --git a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/MutableColumnarRow.java b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/MutableColumnarRow.java
index 0a110a204e0..a6b353a2e84 100644
--- a/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/MutableColumnarRow.java
+++ b/sql/core/src/main/java/org/apache/spark/sql/execution/vectorized/MutableColumnarRow.java
@@ -191,8 +191,8 @@ public final class MutableColumnarRow extends InternalRow {
       return getLong(ordinal);
     } else if (dataType instanceof ArrayType) {
       return getArray(ordinal);
-    } else if (dataType instanceof StructType) {
-      return getStruct(ordinal, ((StructType)dataType).fields().length);
+    } else if (dataType instanceof StructType structType) {
+      return getStruct(ordinal, structType.fields().length);
     } else if (dataType instanceof MapType) {
       return getMap(ordinal);
     } else {
diff --git a/sql/core/src/test/java/test/org/apache/spark/sql/connector/JavaAdvancedDataSourceV2WithV2Filter.java b/sql/core/src/test/java/test/org/apache/spark/sql/connector/JavaAdvancedDataSourceV2WithV2Filter.java
index ec532da6104..0e3f6aed3b6 100644
--- a/sql/core/src/test/java/test/org/apache/spark/sql/connector/JavaAdvancedDataSourceV2WithV2Filter.java
+++ b/sql/core/src/test/java/test/org/apache/spark/sql/connector/JavaAdvancedDataSourceV2WithV2Filter.java
@@ -129,8 +129,8 @@ public class JavaAdvancedDataSourceV2WithV2Filter implements TestingV2Source {
           FieldReference column = (FieldReference) predicate.children()[0];
           assert(predicate.children()[1] instanceof LiteralValue);
           Literal value = (Literal) predicate.children()[1];
-          if ("i".equals(column.describe()) && value.value() instanceof 
Integer) {
-            lowerBound = (Integer) value.value();
+          if ("i".equals(column.describe()) && value.value() instanceof 
Integer integer) {
+            lowerBound = integer;
             break;
           }
         }
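
The predicate hunk above also gets unboxing for free: binding value.value() to an Integer pattern variable lets the int assignment auto-unbox without a cast. A small sketch (hypothetical method, illustration only):

```
public class BoxedPatternDemo {
  // Binding the boxed Integer in the pattern lets the int return value
  // auto-unbox, with no explicit (Integer) cast.
  static int lowerBoundOrDefault(Object value, int dflt) {
    if (value instanceof Integer integer) {
      return integer; // unboxed automatically
    }
    return dflt;
  }

  public static void main(String[] args) {
    System.out.println(lowerBoundOrDefault(42, -1));     // prints: 42
    System.out.println(lowerBoundOrDefault("oops", -1)); // prints: -1
  }
}
```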
diff --git a/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java b/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java
index 48891fdcb1d..9afe53fe825 100644
--- a/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java
+++ b/sql/hive/src/test/java/org/apache/spark/sql/hive/test/Complex.java
@@ -548,7 +548,7 @@ public class Complex implements org.apache.thrift.TBase<Complex, Complex._Fields
       return false;
     }
     if (that instanceof Complex) {
-      return this.equals((Complex)that);
+      return this.equals((Complex) that);
     }
     return false;
   }
diff --git a/streaming/src/test/java/org/apache/spark/streaming/JavaWriteAheadLogSuite.java b/streaming/src/test/java/org/apache/spark/streaming/JavaWriteAheadLogSuite.java
index 3e96250b919..41c83b98ea2 100644
--- a/streaming/src/test/java/org/apache/spark/streaming/JavaWriteAheadLogSuite.java
+++ b/streaming/src/test/java/org/apache/spark/streaming/JavaWriteAheadLogSuite.java
@@ -67,8 +67,8 @@ public class JavaWriteAheadLogSuite extends WriteAheadLog {
 
   @Override
   public ByteBuffer read(WriteAheadLogRecordHandle handle) {
-    if (handle instanceof JavaWriteAheadLogSuiteHandle) {
-      int reqdIndex = ((JavaWriteAheadLogSuiteHandle) handle).index;
+    if (handle instanceof JavaWriteAheadLogSuiteHandle javaWriteAheadLogSuiteHandle) {
+      int reqdIndex = javaWriteAheadLogSuiteHandle.index;
       for (Record record: records) {
         if (record.index == reqdIndex) {
           return record.buffer;


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
