This is an automated email from the ASF dual-hosted git repository.

etudenhoefner pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/iceberg.git


The following commit(s) were added to refs/heads/main by this push:
     new 9281c1b6ee AWS, Spark, Flink: Remove `org.jetbrains.annotations` (#14192)
9281c1b6ee is described below

commit 9281c1b6ee18629adec1ec775644ff240eeb7efa
Author: Manu Zhang <[email protected]>
AuthorDate: Mon Sep 29 21:29:18 2025 +0800

    AWS, Spark, Flink: Remove `org.jetbrains.annotations` (#14192)
---
 .../iceberg/aws/s3/signer/TestS3RestSigner.java    |  4 +--
 build.gradle                                       |  1 +
 flink/v1.20/flink-runtime/LICENSE                  |  8 -----
 .../sink/shuffle/DataStatisticsCoordinator.java    |  6 ++--
 .../flink/maintenance/operator/ManualSource.java   |  2 +-
 .../flink/sink/dynamic/TestDynamicWriter.java      |  8 ++---
 flink/v2.0/flink-runtime/LICENSE                   |  8 -----
 .../sink/shuffle/DataStatisticsCoordinator.java    |  6 ++--
 .../flink/maintenance/operator/ManualSource.java   |  2 +-
 .../flink/sink/dynamic/TestDynamicWriter.java      |  8 ++---
 flink/v2.1/flink-runtime/LICENSE                   |  8 -----
 .../sink/shuffle/DataStatisticsCoordinator.java    |  6 ++--
 .../flink/maintenance/operator/ManualSource.java   |  2 +-
 .../flink/sink/dynamic/TestDynamicWriter.java      |  8 ++---
 kafka-connect/kafka-connect-runtime/hive/LICENSE   |  6 ----
 kafka-connect/kafka-connect-runtime/main/LICENSE   |  6 ----
 spark/v3.4/spark-runtime/LICENSE                   |  8 -----
 .../org/apache/iceberg/spark/SparkTableUtil.java   | 36 +++++++++++-----------
 .../spark/source/TestSparkReaderDeletes.java       |  4 +--
 spark/v3.5/spark-runtime/LICENSE                   |  8 -----
 .../org/apache/iceberg/spark/SparkTableUtil.java   | 36 +++++++++++-----------
 .../spark/source/TestSparkReaderDeletes.java       |  4 +--
 spark/v4.0/spark-runtime/LICENSE                   |  8 -----
 .../org/apache/iceberg/spark/SparkTableUtil.java   | 36 +++++++++++-----------
 .../spark/source/TestSparkReaderDeletes.java       |  4 +--
 25 files changed, 87 insertions(+), 146 deletions(-)
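
In short, this commit replaces the JetBrains nullability annotations with
their JSR-305 (javax.annotation) equivalents and stops bundling the
org.jetbrains:annotations jar. A minimal sketch of the swap, assuming the
JSR-305 artifact (com.google.code.findbugs:jsr305) is on the compile
classpath; the class below is illustrative and not part of this commit:

    import javax.annotation.Nonnull;   // replaces org.jetbrains.annotations.NotNull
    import javax.annotation.Nullable;  // replaces org.jetbrains.annotations.Nullable

    class NullabilityExample {
      // before this change: @org.jetbrains.annotations.NotNull
      @Nonnull
      static String displayName(@Nullable String name) {
        return name == null ? "unknown" : name;
      }
    }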

diff --git a/aws/src/integration/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java b/aws/src/integration/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java
index acc95a0c40..6dac75aa49 100644
--- a/aws/src/integration/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java
+++ b/aws/src/integration/java/org/apache/iceberg/aws/s3/signer/TestS3RestSigner.java
@@ -31,6 +31,7 @@ import java.util.Locale;
 import java.util.Map;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
 import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
 import org.apache.iceberg.aws.s3.MinioUtil;
 import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
 import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
@@ -39,7 +40,6 @@ import org.eclipse.jetty.server.Server;
 import org.eclipse.jetty.server.handler.gzip.GzipHandler;
 import org.eclipse.jetty.servlet.ServletContextHandler;
 import org.eclipse.jetty.servlet.ServletHolder;
-import org.jetbrains.annotations.NotNull;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeAll;
@@ -343,7 +343,7 @@ public class TestS3RestSigner {
       return awsResult;
     }
 
-    @NotNull
+    @Nonnull
     private SdkHttpFullRequest signWithAwsSigner(
         SdkHttpFullRequest request, AwsS3V4SignerParams signerParams) {
      // we need to filter out the unsigned headers for the AWS signer and re-append those headers
diff --git a/build.gradle b/build.gradle
index ba15f59d5f..9d509f22b2 100644
--- a/build.gradle
+++ b/build.gradle
@@ -182,6 +182,7 @@ subprojects {
       exclude group: 'com.sun.jersey'
       exclude group: 'com.sun.jersey.contribs'
       exclude group: 'org.pentaho', module: 'pentaho-aggdesigner-algorithm'
+      exclude group: 'org.jetbrains', module: 'annotations'
     }
 
     testArtifacts
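
The exclude above drops org.jetbrains:annotations from every configuration
it would otherwise reach transitively. A hypothetical smoke test (not part
of this commit) to confirm the jar is gone from a runtime classpath:

    public class VerifyExclusion {
      public static void main(String[] args) {
        try {
          // resolves only if a JetBrains annotations jar is still present
          Class.forName("org.jetbrains.annotations.NotNull");
          System.out.println("org.jetbrains:annotations is still on the classpath");
        } catch (ClassNotFoundException e) {
          System.out.println("org.jetbrains:annotations was excluded as intended");
        }
      }
    }
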
diff --git a/flink/v1.20/flink-runtime/LICENSE b/flink/v1.20/flink-runtime/LICENSE
index 9ca869edb5..5f8bfb4edb 100644
--- a/flink/v1.20/flink-runtime/LICENSE
+++ b/flink/v1.20/flink-runtime/LICENSE
@@ -267,14 +267,6 @@ License: http://www.apache.org/licenses/LICENSE-2.0
 
 
--------------------------------------------------------------------------------
 
-This binary artifact contains JetBrains annotations.
-
-Copyright: 2000-2020 JetBrains s.r.o.
-Home page: https://github.com/JetBrains/java-annotations
-License: http://www.apache.org/licenses/LICENSE-2.0
-
---------------------------------------------------------------------------------
-
 This binary artifact contains Google Guava.
 
 Copyright: 2006-2020 The Guava Authors
diff --git a/flink/v1.20/flink/src/main/java/org/apache/iceberg/flink/sink/shuffle/DataStatisticsCoordinator.java b/flink/v1.20/flink/src/main/java/org/apache/iceberg/flink/sink/shuffle/DataStatisticsCoordinator.java
index 773d0fe6c6..2dcc75b7f6 100644
--- a/flink/v1.20/flink/src/main/java/org/apache/iceberg/flink/sink/shuffle/DataStatisticsCoordinator.java
+++ b/flink/v1.20/flink/src/main/java/org/apache/iceberg/flink/sink/shuffle/DataStatisticsCoordinator.java
@@ -27,6 +27,8 @@ import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ThreadFactory;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.api.common.typeutils.TypeSerializer;
 import org.apache.flink.runtime.operators.coordination.OperatorCoordinator;
@@ -44,8 +46,6 @@ import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
 import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
 import org.apache.iceberg.relocated.com.google.common.collect.Maps;
 import org.apache.iceberg.types.Comparators;
-import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -517,7 +517,7 @@ class DataStatisticsCoordinator implements OperatorCoordinator {
     }
 
     @Override
-    public synchronized Thread newThread(@NotNull Runnable runnable) {
+    public synchronized Thread newThread(@Nonnull Runnable runnable) {
       thread = new Thread(runnable, coordinatorThreadName);
       thread.setContextClassLoader(classLoader);
       thread.setUncaughtExceptionHandler(this);
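
The hunk above patches a ThreadFactory. A self-contained sketch of the same
pattern with the javax annotation; the class and field names here are
illustrative, not the coordinator's actual code:

    import java.util.concurrent.ThreadFactory;
    import javax.annotation.Nonnull;

    class NamedThreadFactory implements ThreadFactory {
      private final String threadName;

      NamedThreadFactory(String threadName) {
        this.threadName = threadName;
      }

      @Override
      public Thread newThread(@Nonnull Runnable runnable) {
        // give the thread a stable, descriptive name, as the coordinator does
        return new Thread(runnable, threadName);
      }
    }
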
diff --git a/flink/v1.20/flink/src/test/java/org/apache/iceberg/flink/maintenance/operator/ManualSource.java b/flink/v1.20/flink/src/test/java/org/apache/iceberg/flink/maintenance/operator/ManualSource.java
index eff32fcfa1..73a5fcedd2 100644
--- a/flink/v1.20/flink/src/test/java/org/apache/iceberg/flink/maintenance/operator/ManualSource.java
+++ b/flink/v1.20/flink/src/test/java/org/apache/iceberg/flink/maintenance/operator/ManualSource.java
@@ -22,6 +22,7 @@ import java.util.ArrayDeque;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
+import javax.annotation.Nullable;
 import org.apache.flink.api.common.eventtime.Watermark;
 import org.apache.flink.api.common.eventtime.WatermarkStrategy;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
@@ -41,7 +42,6 @@ import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.iceberg.relocated.com.google.common.collect.Lists;
 import org.apache.iceberg.relocated.com.google.common.collect.Queues;
-import org.jetbrains.annotations.Nullable;
 
 /** Testing source implementation for Flink sources which can be triggered manually. */
 public class ManualSource<T>
diff --git a/flink/v1.20/flink/src/test/java/org/apache/iceberg/flink/sink/dynamic/TestDynamicWriter.java b/flink/v1.20/flink/src/test/java/org/apache/iceberg/flink/sink/dynamic/TestDynamicWriter.java
index 42875982a0..91a3a5d5a7 100644
--- a/flink/v1.20/flink/src/test/java/org/apache/iceberg/flink/sink/dynamic/TestDynamicWriter.java
+++ b/flink/v1.20/flink/src/test/java/org/apache/iceberg/flink/sink/dynamic/TestDynamicWriter.java
@@ -25,6 +25,7 @@ import java.io.File;
 import java.net.URI;
 import java.util.Collection;
 import java.util.Map;
+import javax.annotation.Nonnull;
 import org.apache.flink.metrics.groups.UnregisteredMetricsGroup;
 import org.apache.flink.table.data.RowData;
 import org.apache.iceberg.FileFormat;
@@ -40,7 +41,6 @@ import org.apache.iceberg.io.TaskWriter;
 import org.apache.iceberg.io.WriteResult;
 import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
 import org.apache.iceberg.relocated.com.google.common.collect.Sets;
-import org.jetbrains.annotations.NotNull;
 import org.junit.jupiter.api.Test;
 
 class TestDynamicWriter extends TestFlinkIcebergSinkBase {
@@ -201,7 +201,7 @@ class TestDynamicWriter extends TestFlinkIcebergSinkBase {
             "Equality field columns shouldn't be empty when configuring to use 
UPSERT data.");
   }
 
-  private static @NotNull DynamicWriter createDynamicWriter(
+  private static @Nonnull DynamicWriter createDynamicWriter(
       Catalog catalog, Map<String, String> properties) {
     DynamicWriter dynamicWriter =
         new DynamicWriter(
@@ -216,11 +216,11 @@ class TestDynamicWriter extends TestFlinkIcebergSinkBase {
     return dynamicWriter;
   }
 
-  private static @NotNull DynamicWriter createDynamicWriter(Catalog catalog) {
+  private static @Nonnull DynamicWriter createDynamicWriter(Catalog catalog) {
     return createDynamicWriter(catalog, Map.of());
   }
 
-  private static @NotNull DynamicRecordInternal getDynamicRecordInternal(Table table1) {
+  private static @Nonnull DynamicRecordInternal getDynamicRecordInternal(Table table1) {
     DynamicRecordInternal record = new DynamicRecordInternal();
     record.setTableName(TableIdentifier.parse(table1.name()).name());
     record.setSchema(table1.schema());
diff --git a/flink/v2.0/flink-runtime/LICENSE b/flink/v2.0/flink-runtime/LICENSE
index 9ca869edb5..5f8bfb4edb 100644
--- a/flink/v2.0/flink-runtime/LICENSE
+++ b/flink/v2.0/flink-runtime/LICENSE
@@ -267,14 +267,6 @@ License: http://www.apache.org/licenses/LICENSE-2.0
 
 
--------------------------------------------------------------------------------
 
-This binary artifact contains JetBrains annotations.
-
-Copyright: 2000-2020 JetBrains s.r.o.
-Home page: https://github.com/JetBrains/java-annotations
-License: http://www.apache.org/licenses/LICENSE-2.0
-
---------------------------------------------------------------------------------
-
 This binary artifact contains Google Guava.
 
 Copyright: 2006-2020 The Guava Authors
diff --git a/flink/v2.0/flink/src/main/java/org/apache/iceberg/flink/sink/shuffle/DataStatisticsCoordinator.java b/flink/v2.0/flink/src/main/java/org/apache/iceberg/flink/sink/shuffle/DataStatisticsCoordinator.java
index 773d0fe6c6..2dcc75b7f6 100644
--- a/flink/v2.0/flink/src/main/java/org/apache/iceberg/flink/sink/shuffle/DataStatisticsCoordinator.java
+++ b/flink/v2.0/flink/src/main/java/org/apache/iceberg/flink/sink/shuffle/DataStatisticsCoordinator.java
@@ -27,6 +27,8 @@ import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ThreadFactory;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.api.common.typeutils.TypeSerializer;
 import org.apache.flink.runtime.operators.coordination.OperatorCoordinator;
@@ -44,8 +46,6 @@ import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
 import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
 import org.apache.iceberg.relocated.com.google.common.collect.Maps;
 import org.apache.iceberg.types.Comparators;
-import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -517,7 +517,7 @@ class DataStatisticsCoordinator implements OperatorCoordinator {
     }
 
     @Override
-    public synchronized Thread newThread(@NotNull Runnable runnable) {
+    public synchronized Thread newThread(@Nonnull Runnable runnable) {
       thread = new Thread(runnable, coordinatorThreadName);
       thread.setContextClassLoader(classLoader);
       thread.setUncaughtExceptionHandler(this);
diff --git a/flink/v2.0/flink/src/test/java/org/apache/iceberg/flink/maintenance/operator/ManualSource.java b/flink/v2.0/flink/src/test/java/org/apache/iceberg/flink/maintenance/operator/ManualSource.java
index eff32fcfa1..73a5fcedd2 100644
--- a/flink/v2.0/flink/src/test/java/org/apache/iceberg/flink/maintenance/operator/ManualSource.java
+++ b/flink/v2.0/flink/src/test/java/org/apache/iceberg/flink/maintenance/operator/ManualSource.java
@@ -22,6 +22,7 @@ import java.util.ArrayDeque;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
+import javax.annotation.Nullable;
 import org.apache.flink.api.common.eventtime.Watermark;
 import org.apache.flink.api.common.eventtime.WatermarkStrategy;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
@@ -41,7 +42,6 @@ import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.iceberg.relocated.com.google.common.collect.Lists;
 import org.apache.iceberg.relocated.com.google.common.collect.Queues;
-import org.jetbrains.annotations.Nullable;
 
 /** Testing source implementation for Flink sources which can be triggered manually. */
 public class ManualSource<T>
diff --git a/flink/v2.0/flink/src/test/java/org/apache/iceberg/flink/sink/dynamic/TestDynamicWriter.java b/flink/v2.0/flink/src/test/java/org/apache/iceberg/flink/sink/dynamic/TestDynamicWriter.java
index 42875982a0..91a3a5d5a7 100644
--- a/flink/v2.0/flink/src/test/java/org/apache/iceberg/flink/sink/dynamic/TestDynamicWriter.java
+++ b/flink/v2.0/flink/src/test/java/org/apache/iceberg/flink/sink/dynamic/TestDynamicWriter.java
@@ -25,6 +25,7 @@ import java.io.File;
 import java.net.URI;
 import java.util.Collection;
 import java.util.Map;
+import javax.annotation.Nonnull;
 import org.apache.flink.metrics.groups.UnregisteredMetricsGroup;
 import org.apache.flink.table.data.RowData;
 import org.apache.iceberg.FileFormat;
@@ -40,7 +41,6 @@ import org.apache.iceberg.io.TaskWriter;
 import org.apache.iceberg.io.WriteResult;
 import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
 import org.apache.iceberg.relocated.com.google.common.collect.Sets;
-import org.jetbrains.annotations.NotNull;
 import org.junit.jupiter.api.Test;
 
 class TestDynamicWriter extends TestFlinkIcebergSinkBase {
@@ -201,7 +201,7 @@ class TestDynamicWriter extends TestFlinkIcebergSinkBase {
             "Equality field columns shouldn't be empty when configuring to use 
UPSERT data.");
   }
 
-  private static @NotNull DynamicWriter createDynamicWriter(
+  private static @Nonnull DynamicWriter createDynamicWriter(
       Catalog catalog, Map<String, String> properties) {
     DynamicWriter dynamicWriter =
         new DynamicWriter(
@@ -216,11 +216,11 @@ class TestDynamicWriter extends TestFlinkIcebergSinkBase {
     return dynamicWriter;
   }
 
-  private static @NotNull DynamicWriter createDynamicWriter(Catalog catalog) {
+  private static @Nonnull DynamicWriter createDynamicWriter(Catalog catalog) {
     return createDynamicWriter(catalog, Map.of());
   }
 
-  private static @NotNull DynamicRecordInternal getDynamicRecordInternal(Table table1) {
+  private static @Nonnull DynamicRecordInternal getDynamicRecordInternal(Table table1) {
     DynamicRecordInternal record = new DynamicRecordInternal();
     record.setTableName(TableIdentifier.parse(table1.name()).name());
     record.setSchema(table1.schema());
diff --git a/flink/v2.1/flink-runtime/LICENSE b/flink/v2.1/flink-runtime/LICENSE
index 9ca869edb5..5f8bfb4edb 100644
--- a/flink/v2.1/flink-runtime/LICENSE
+++ b/flink/v2.1/flink-runtime/LICENSE
@@ -267,14 +267,6 @@ License: http://www.apache.org/licenses/LICENSE-2.0
 
 
--------------------------------------------------------------------------------
 
-This binary artifact contains JetBrains annotations.
-
-Copyright: 2000-2020 JetBrains s.r.o.
-Home page: https://github.com/JetBrains/java-annotations
-License: http://www.apache.org/licenses/LICENSE-2.0
-
---------------------------------------------------------------------------------
-
 This binary artifact contains Google Guava.
 
 Copyright: 2006-2020 The Guava Authors
diff --git a/flink/v2.1/flink/src/main/java/org/apache/iceberg/flink/sink/shuffle/DataStatisticsCoordinator.java b/flink/v2.1/flink/src/main/java/org/apache/iceberg/flink/sink/shuffle/DataStatisticsCoordinator.java
index 773d0fe6c6..2dcc75b7f6 100644
--- a/flink/v2.1/flink/src/main/java/org/apache/iceberg/flink/sink/shuffle/DataStatisticsCoordinator.java
+++ b/flink/v2.1/flink/src/main/java/org/apache/iceberg/flink/sink/shuffle/DataStatisticsCoordinator.java
@@ -27,6 +27,8 @@ import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.ThreadFactory;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 import org.apache.flink.annotation.Internal;
 import org.apache.flink.api.common.typeutils.TypeSerializer;
 import org.apache.flink.runtime.operators.coordination.OperatorCoordinator;
@@ -44,8 +46,6 @@ import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
 import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
 import org.apache.iceberg.relocated.com.google.common.collect.Maps;
 import org.apache.iceberg.types.Comparators;
-import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -517,7 +517,7 @@ class DataStatisticsCoordinator implements OperatorCoordinator {
     }
 
     @Override
-    public synchronized Thread newThread(@NotNull Runnable runnable) {
+    public synchronized Thread newThread(@Nonnull Runnable runnable) {
       thread = new Thread(runnable, coordinatorThreadName);
       thread.setContextClassLoader(classLoader);
       thread.setUncaughtExceptionHandler(this);
diff --git a/flink/v2.1/flink/src/test/java/org/apache/iceberg/flink/maintenance/operator/ManualSource.java b/flink/v2.1/flink/src/test/java/org/apache/iceberg/flink/maintenance/operator/ManualSource.java
index eff32fcfa1..73a5fcedd2 100644
--- a/flink/v2.1/flink/src/test/java/org/apache/iceberg/flink/maintenance/operator/ManualSource.java
+++ b/flink/v2.1/flink/src/test/java/org/apache/iceberg/flink/maintenance/operator/ManualSource.java
@@ -22,6 +22,7 @@ import java.util.ArrayDeque;
 import java.util.Collections;
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
+import javax.annotation.Nullable;
 import org.apache.flink.api.common.eventtime.Watermark;
 import org.apache.flink.api.common.eventtime.WatermarkStrategy;
 import org.apache.flink.api.common.typeinfo.TypeInformation;
@@ -41,7 +42,6 @@ import org.apache.flink.streaming.api.datastream.DataStream;
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 import org.apache.iceberg.relocated.com.google.common.collect.Lists;
 import org.apache.iceberg.relocated.com.google.common.collect.Queues;
-import org.jetbrains.annotations.Nullable;
 
 /** Testing source implementation for Flink sources which can be triggered manually. */
 public class ManualSource<T>
diff --git a/flink/v2.1/flink/src/test/java/org/apache/iceberg/flink/sink/dynamic/TestDynamicWriter.java b/flink/v2.1/flink/src/test/java/org/apache/iceberg/flink/sink/dynamic/TestDynamicWriter.java
index 42875982a0..91a3a5d5a7 100644
--- a/flink/v2.1/flink/src/test/java/org/apache/iceberg/flink/sink/dynamic/TestDynamicWriter.java
+++ b/flink/v2.1/flink/src/test/java/org/apache/iceberg/flink/sink/dynamic/TestDynamicWriter.java
@@ -25,6 +25,7 @@ import java.io.File;
 import java.net.URI;
 import java.util.Collection;
 import java.util.Map;
+import javax.annotation.Nonnull;
 import org.apache.flink.metrics.groups.UnregisteredMetricsGroup;
 import org.apache.flink.table.data.RowData;
 import org.apache.iceberg.FileFormat;
@@ -40,7 +41,6 @@ import org.apache.iceberg.io.TaskWriter;
 import org.apache.iceberg.io.WriteResult;
 import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
 import org.apache.iceberg.relocated.com.google.common.collect.Sets;
-import org.jetbrains.annotations.NotNull;
 import org.junit.jupiter.api.Test;
 
 class TestDynamicWriter extends TestFlinkIcebergSinkBase {
@@ -201,7 +201,7 @@ class TestDynamicWriter extends TestFlinkIcebergSinkBase {
             "Equality field columns shouldn't be empty when configuring to use 
UPSERT data.");
   }
 
-  private static @NotNull DynamicWriter createDynamicWriter(
+  private static @Nonnull DynamicWriter createDynamicWriter(
       Catalog catalog, Map<String, String> properties) {
     DynamicWriter dynamicWriter =
         new DynamicWriter(
@@ -216,11 +216,11 @@ class TestDynamicWriter extends TestFlinkIcebergSinkBase {
     return dynamicWriter;
   }
 
-  private static @NotNull DynamicWriter createDynamicWriter(Catalog catalog) {
+  private static @Nonnull DynamicWriter createDynamicWriter(Catalog catalog) {
     return createDynamicWriter(catalog, Map.of());
   }
 
-  private static @NotNull DynamicRecordInternal getDynamicRecordInternal(Table table1) {
+  private static @Nonnull DynamicRecordInternal getDynamicRecordInternal(Table table1) {
     DynamicRecordInternal record = new DynamicRecordInternal();
     record.setTableName(TableIdentifier.parse(table1.name()).name());
     record.setSchema(table1.schema());
diff --git a/kafka-connect/kafka-connect-runtime/hive/LICENSE b/kafka-connect/kafka-connect-runtime/hive/LICENSE
index cdbfe40141..1f18593b0f 100644
--- a/kafka-connect/kafka-connect-runtime/hive/LICENSE
+++ b/kafka-connect/kafka-connect-runtime/hive/LICENSE
@@ -1644,12 +1644,6 @@ License (from POM): Eclipse Public License - Version 1.0 - https://www.eclipse.o
 
 
--------------------------------------------------------------------------------
 
-Group: org.jetbrains  Name: annotations  Version: 17.0.0
-Project URL (from POM): https://github.com/JetBrains/java-annotations
-License (from POM): The Apache Software License, Version 2.0 - https://www.apache.org/licenses/LICENSE-2.0.txt
-
---------------------------------------------------------------------------------
-
 Group: org.jline  Name: jline  Version: 3.9.0
 License (from POM): The BSD License - http://www.opensource.org/licenses/bsd-license.php
 
diff --git a/kafka-connect/kafka-connect-runtime/main/LICENSE b/kafka-connect/kafka-connect-runtime/main/LICENSE
index 666c1b0478..c577e14381 100644
--- a/kafka-connect/kafka-connect-runtime/main/LICENSE
+++ b/kafka-connect/kafka-connect-runtime/main/LICENSE
@@ -1169,12 +1169,6 @@ License (from POM): Apache 2 - https://www.apache.org/licenses/LICENSE-2.0
 
 
--------------------------------------------------------------------------------
 
-Group: org.jetbrains  Name: annotations  Version: 17.0.0
-Project URL (from POM): https://github.com/JetBrains/java-annotations
-License (from POM): The Apache Software License, Version 2.0 - https://www.apache.org/licenses/LICENSE-2.0.txt
-
---------------------------------------------------------------------------------
-
 Group: org.ow2.asm  Name: asm  Version: 9.7.1
 Project URL (from manifest): http://asm.ow2.org
 Manifest License: BSD-3-Clause;link=https://asm.ow2.io/LICENSE.txt (Not packaged)
diff --git a/spark/v3.4/spark-runtime/LICENSE b/spark/v3.4/spark-runtime/LICENSE
index 1054b33f52..e36e0b6845 100644
--- a/spark/v3.4/spark-runtime/LICENSE
+++ b/spark/v3.4/spark-runtime/LICENSE
@@ -318,14 +318,6 @@ License: http://www.apache.org/licenses/LICENSE-2.0
 
 
--------------------------------------------------------------------------------
 
-This binary artifact contains JetBrains annotations.
-
-Copyright: 2000-2020 JetBrains s.r.o.
-Home page: https://github.com/JetBrains/java-annotations
-License: http://www.apache.org/licenses/LICENSE-2.0
-
---------------------------------------------------------------------------------
-
 This binary artifact contains code from Cloudera Kite.
 
 Copyright: 2013-2017 Cloudera Inc.
diff --git a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/SparkTableUtil.java b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/SparkTableUtil.java
index ade2bd5b5c..20032c898a 100644
--- a/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/SparkTableUtil.java
+++ b/spark/v3.4/spark/src/main/java/org/apache/iceberg/spark/SparkTableUtil.java
@@ -39,6 +39,8 @@ import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.iceberg.AppendFiles;
@@ -105,8 +107,6 @@ import org.apache.spark.sql.catalyst.parser.ParseException;
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan;
 import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation;
 import org.apache.spark.sql.util.CaseInsensitiveStringMap;
-import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import scala.Function2;
@@ -1186,7 +1186,7 @@ public class SparkTableUtil {
       getService().shutdown();
     }
 
-    @NotNull
+    @Nonnull
     @Override
     public List<Runnable> shutdownNow() {
       return getService().shutdownNow();
@@ -1203,60 +1203,60 @@ public class SparkTableUtil {
     }
 
     @Override
-    public boolean awaitTermination(long timeout, @NotNull TimeUnit unit)
+    public boolean awaitTermination(long timeout, @Nonnull TimeUnit unit)
         throws InterruptedException {
       return getService().awaitTermination(timeout, unit);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public <T> Future<T> submit(@NotNull Callable<T> task) {
+    public <T> Future<T> submit(@Nonnull Callable<T> task) {
       return getService().submit(task);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public <T> Future<T> submit(@NotNull Runnable task, T result) {
+    public <T> Future<T> submit(@Nonnull Runnable task, T result) {
       return getService().submit(task, result);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public Future<?> submit(@NotNull Runnable task) {
+    public Future<?> submit(@Nonnull Runnable task) {
       return getService().submit(task);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public <T> List<Future<T>> invokeAll(@NotNull Collection<? extends Callable<T>> tasks)
+    public <T> List<Future<T>> invokeAll(@Nonnull Collection<? extends Callable<T>> tasks)
         throws InterruptedException {
       return getService().invokeAll(tasks);
     }
 
-    @NotNull
+    @Nonnull
     @Override
     public <T> List<Future<T>> invokeAll(
-        @NotNull Collection<? extends Callable<T>> tasks, long timeout, @NotNull TimeUnit unit)
+        @Nonnull Collection<? extends Callable<T>> tasks, long timeout, @Nonnull TimeUnit unit)
         throws InterruptedException {
       return getService().invokeAll(tasks, timeout, unit);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public <T> T invokeAny(@NotNull Collection<? extends Callable<T>> tasks)
+    public <T> T invokeAny(@Nonnull Collection<? extends Callable<T>> tasks)
         throws InterruptedException, ExecutionException {
       return getService().invokeAny(tasks);
     }
 
     @Override
     public <T> T invokeAny(
-        @NotNull Collection<? extends Callable<T>> tasks, long timeout, @NotNull TimeUnit unit)
+        @Nonnull Collection<? extends Callable<T>> tasks, long timeout, @Nonnull TimeUnit unit)
         throws InterruptedException, ExecutionException, TimeoutException {
       return getService().invokeAny(tasks, timeout, unit);
     }
 
     @Override
-    public void execute(@NotNull Runnable command) {
+    public void execute(@Nonnull Runnable command) {
       getService().execute(command);
     }
 
diff --git a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java
index 16fa726032..64dbcbf5d0 100644
--- a/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java
+++ b/spark/v3.4/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java
@@ -28,6 +28,7 @@ import java.io.File;
 import java.io.IOException;
 import java.util.List;
 import java.util.Set;
+import javax.annotation.Nonnull;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -86,7 +87,6 @@ import org.apache.spark.sql.SparkSession;
 import org.apache.spark.sql.catalyst.InternalRow;
 import org.apache.spark.sql.internal.SQLConf;
 import org.apache.spark.sql.types.StructType;
-import org.jetbrains.annotations.NotNull;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeAll;
@@ -673,7 +673,7 @@ public class TestSparkReaderDeletes extends DeleteReadTests {
     return set;
   }
 
-  @NotNull
+  @Nonnull
   private static List recordsWithDeletedColumn() {
     List records = Lists.newArrayList();
 
diff --git a/spark/v3.5/spark-runtime/LICENSE b/spark/v3.5/spark-runtime/LICENSE
index 1054b33f52..e36e0b6845 100644
--- a/spark/v3.5/spark-runtime/LICENSE
+++ b/spark/v3.5/spark-runtime/LICENSE
@@ -318,14 +318,6 @@ License: http://www.apache.org/licenses/LICENSE-2.0
 
 
--------------------------------------------------------------------------------
 
-This binary artifact contains JetBrains annotations.
-
-Copyright: 2000-2020 JetBrains s.r.o.
-Home page: https://github.com/JetBrains/java-annotations
-License: http://www.apache.org/licenses/LICENSE-2.0
-
---------------------------------------------------------------------------------
-
 This binary artifact contains code from Cloudera Kite.
 
 Copyright: 2013-2017 Cloudera Inc.
diff --git a/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/SparkTableUtil.java b/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/SparkTableUtil.java
index f3e35074a5..4e9573d555 100644
--- a/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/SparkTableUtil.java
+++ b/spark/v3.5/spark/src/main/java/org/apache/iceberg/spark/SparkTableUtil.java
@@ -39,6 +39,8 @@ import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.iceberg.AppendFiles;
@@ -105,8 +107,6 @@ import org.apache.spark.sql.catalyst.parser.ParseException;
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan;
 import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation;
 import org.apache.spark.sql.util.CaseInsensitiveStringMap;
-import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import scala.Function2;
@@ -1096,7 +1096,7 @@ public class SparkTableUtil {
       getService().shutdown();
     }
 
-    @NotNull
+    @Nonnull
     @Override
     public List<Runnable> shutdownNow() {
       return getService().shutdownNow();
@@ -1113,60 +1113,60 @@ public class SparkTableUtil {
     }
 
     @Override
-    public boolean awaitTermination(long timeout, @NotNull TimeUnit unit)
+    public boolean awaitTermination(long timeout, @Nonnull TimeUnit unit)
         throws InterruptedException {
       return getService().awaitTermination(timeout, unit);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public <T> Future<T> submit(@NotNull Callable<T> task) {
+    public <T> Future<T> submit(@Nonnull Callable<T> task) {
       return getService().submit(task);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public <T> Future<T> submit(@NotNull Runnable task, T result) {
+    public <T> Future<T> submit(@Nonnull Runnable task, T result) {
       return getService().submit(task, result);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public Future<?> submit(@NotNull Runnable task) {
+    public Future<?> submit(@Nonnull Runnable task) {
       return getService().submit(task);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public <T> List<Future<T>> invokeAll(@NotNull Collection<? extends Callable<T>> tasks)
+    public <T> List<Future<T>> invokeAll(@Nonnull Collection<? extends Callable<T>> tasks)
         throws InterruptedException {
       return getService().invokeAll(tasks);
     }
 
-    @NotNull
+    @Nonnull
     @Override
     public <T> List<Future<T>> invokeAll(
-        @NotNull Collection<? extends Callable<T>> tasks, long timeout, @NotNull TimeUnit unit)
+        @Nonnull Collection<? extends Callable<T>> tasks, long timeout, @Nonnull TimeUnit unit)
         throws InterruptedException {
       return getService().invokeAll(tasks, timeout, unit);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public <T> T invokeAny(@NotNull Collection<? extends Callable<T>> tasks)
+    public <T> T invokeAny(@Nonnull Collection<? extends Callable<T>> tasks)
         throws InterruptedException, ExecutionException {
       return getService().invokeAny(tasks);
     }
 
     @Override
     public <T> T invokeAny(
-        @NotNull Collection<? extends Callable<T>> tasks, long timeout, @NotNull TimeUnit unit)
+        @Nonnull Collection<? extends Callable<T>> tasks, long timeout, @Nonnull TimeUnit unit)
         throws InterruptedException, ExecutionException, TimeoutException {
       return getService().invokeAny(tasks, timeout, unit);
     }
 
     @Override
-    public void execute(@NotNull Runnable command) {
+    public void execute(@Nonnull Runnable command) {
       getService().execute(command);
     }
 
diff --git a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java
index 42699f4662..c7716d3430 100644
--- a/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java
+++ b/spark/v3.5/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java
@@ -30,6 +30,7 @@ import java.io.IOException;
 import java.time.LocalDate;
 import java.util.List;
 import java.util.Set;
+import javax.annotation.Nonnull;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -92,7 +93,6 @@ import org.apache.spark.sql.catalyst.InternalRow;
 import org.apache.spark.sql.internal.SQLConf;
 import org.apache.spark.sql.types.StructType;
 import org.apache.spark.sql.vectorized.ColumnarBatch;
-import org.jetbrains.annotations.NotNull;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeAll;
@@ -737,7 +737,7 @@ public class TestSparkReaderDeletes extends DeleteReadTests {
     return set;
   }
 
-  @NotNull
+  @Nonnull
   private static List recordsWithDeletedColumn() {
     List records = Lists.newArrayList();
 
diff --git a/spark/v4.0/spark-runtime/LICENSE b/spark/v4.0/spark-runtime/LICENSE
index 1054b33f52..e36e0b6845 100644
--- a/spark/v4.0/spark-runtime/LICENSE
+++ b/spark/v4.0/spark-runtime/LICENSE
@@ -318,14 +318,6 @@ License: http://www.apache.org/licenses/LICENSE-2.0
 
 
--------------------------------------------------------------------------------
 
-This binary artifact contains JetBrains annotations.
-
-Copyright: 2000-2020 JetBrains s.r.o.
-Home page: https://github.com/JetBrains/java-annotations
-License: http://www.apache.org/licenses/LICENSE-2.0
-
---------------------------------------------------------------------------------
-
 This binary artifact contains code from Cloudera Kite.
 
 Copyright: 2013-2017 Cloudera Inc.
diff --git a/spark/v4.0/spark/src/main/java/org/apache/iceberg/spark/SparkTableUtil.java b/spark/v4.0/spark/src/main/java/org/apache/iceberg/spark/SparkTableUtil.java
index 7ef52606a6..4a0be2a7d6 100644
--- a/spark/v4.0/spark/src/main/java/org/apache/iceberg/spark/SparkTableUtil.java
+++ b/spark/v4.0/spark/src/main/java/org/apache/iceberg/spark/SparkTableUtil.java
@@ -39,6 +39,8 @@ import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 import java.util.stream.Collectors;
+import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.iceberg.AppendFiles;
@@ -105,8 +107,6 @@ import org.apache.spark.sql.catalyst.parser.ParseException;
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan;
 import org.apache.spark.sql.execution.datasources.v2.DataSourceV2Relation;
 import org.apache.spark.sql.util.CaseInsensitiveStringMap;
-import org.jetbrains.annotations.NotNull;
-import org.jetbrains.annotations.Nullable;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import scala.Function2;
@@ -1109,7 +1109,7 @@ public class SparkTableUtil {
       getService().shutdown();
     }
 
-    @NotNull
+    @Nonnull
     @Override
     public List<Runnable> shutdownNow() {
       return getService().shutdownNow();
@@ -1126,60 +1126,60 @@ public class SparkTableUtil {
     }
 
     @Override
-    public boolean awaitTermination(long timeout, @NotNull TimeUnit unit)
+    public boolean awaitTermination(long timeout, @Nonnull TimeUnit unit)
         throws InterruptedException {
       return getService().awaitTermination(timeout, unit);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public <T> Future<T> submit(@NotNull Callable<T> task) {
+    public <T> Future<T> submit(@Nonnull Callable<T> task) {
       return getService().submit(task);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public <T> Future<T> submit(@NotNull Runnable task, T result) {
+    public <T> Future<T> submit(@Nonnull Runnable task, T result) {
       return getService().submit(task, result);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public Future<?> submit(@NotNull Runnable task) {
+    public Future<?> submit(@Nonnull Runnable task) {
       return getService().submit(task);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public <T> List<Future<T>> invokeAll(@NotNull Collection<? extends Callable<T>> tasks)
+    public <T> List<Future<T>> invokeAll(@Nonnull Collection<? extends Callable<T>> tasks)
         throws InterruptedException {
       return getService().invokeAll(tasks);
     }
 
-    @NotNull
+    @Nonnull
     @Override
     public <T> List<Future<T>> invokeAll(
-        @NotNull Collection<? extends Callable<T>> tasks, long timeout, @NotNull TimeUnit unit)
+        @Nonnull Collection<? extends Callable<T>> tasks, long timeout, @Nonnull TimeUnit unit)
         throws InterruptedException {
       return getService().invokeAll(tasks, timeout, unit);
     }
 
-    @NotNull
+    @Nonnull
     @Override
-    public <T> T invokeAny(@NotNull Collection<? extends Callable<T>> tasks)
+    public <T> T invokeAny(@Nonnull Collection<? extends Callable<T>> tasks)
         throws InterruptedException, ExecutionException {
       return getService().invokeAny(tasks);
     }
 
     @Override
     public <T> T invokeAny(
-        @NotNull Collection<? extends Callable<T>> tasks, long timeout, @NotNull TimeUnit unit)
+        @Nonnull Collection<? extends Callable<T>> tasks, long timeout, @Nonnull TimeUnit unit)
         throws InterruptedException, ExecutionException, TimeoutException {
       return getService().invokeAny(tasks, timeout, unit);
     }
 
     @Override
-    public void execute(@NotNull Runnable command) {
+    public void execute(@Nonnull Runnable command) {
       getService().execute(command);
     }
 
diff --git a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java
index 42699f4662..c7716d3430 100644
--- a/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java
+++ b/spark/v4.0/spark/src/test/java/org/apache/iceberg/spark/source/TestSparkReaderDeletes.java
@@ -30,6 +30,7 @@ import java.io.IOException;
 import java.time.LocalDate;
 import java.util.List;
 import java.util.Set;
+import javax.annotation.Nonnull;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -92,7 +93,6 @@ import org.apache.spark.sql.catalyst.InternalRow;
 import org.apache.spark.sql.internal.SQLConf;
 import org.apache.spark.sql.types.StructType;
 import org.apache.spark.sql.vectorized.ColumnarBatch;
-import org.jetbrains.annotations.NotNull;
 import org.junit.jupiter.api.AfterAll;
 import org.junit.jupiter.api.AfterEach;
 import org.junit.jupiter.api.BeforeAll;
@@ -737,7 +737,7 @@ public class TestSparkReaderDeletes extends DeleteReadTests {
     return set;
   }
 
-  @NotNull
+  @Nonnull
   private static List recordsWithDeletedColumn() {
     List records = Lists.newArrayList();
 

