This is an automated email from the ASF dual-hosted git repository.

pabloem pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/beam.git


The following commit(s) were added to refs/heads/master by this push:
     new 09bbb48  [BEAM-11936] Fix errorprone UnusedVariable in IO (#16036)
09bbb48 is described below

commit 09bbb48187301f18bec6d9110741c69b955e2b5a
Author: Benjamin Gonzalez <74670721+benw...@users.noreply.github.com>
AuthorDate: Fri Dec 3 14:14:32 2021 -0600

    [BEAM-11936] Fix errorprone UnusedVariable in IO (#16036)
    
    * [BEAM-11936] Fix errorprone UnusedVariable in io
    
    * [BEAM-11936] Change todo issue ref, and spotlessApply
    
    * [BEAM-11936] Remove unused code
    
    * [BEAM-11936] Remove suppressUnusedVariable flag
    
    * [BEAM-11936] Remove unused code after merge
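    
    For context, a minimal sketch (hypothetical class, not taken from this
    patch) of what errorprone's UnusedVariable check flags: a private member
    that is written but never read. The most common fix in this commit is to
    delete the dead member outright, as in the KinesisIO hunk below:
    
        import org.slf4j.Logger;
        import org.slf4j.LoggerFactory;
        
        class ExampleIO {
          // Flagged by UnusedVariable: assigned here, read nowhere.
          private static final Logger LOG = LoggerFactory.getLogger(ExampleIO.class);
          private static final int DEFAULT_NUM_RETRIES = 6; // also flagged
          // Fix: remove both members; nothing else changes.
        }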
---
 sdks/java/io/amazon-web-services/build.gradle      |  1 -
 sdks/java/io/amazon-web-services2/build.gradle     |  1 -
 .../apache/beam/sdk/io/aws2/kinesis/KinesisIO.java |  6 -----
 .../beam/sdk/io/aws2/s3/S3FileSystemTest.java      |  2 +-
 sdks/java/io/amqp/build.gradle                     |  2 +-
 .../java/org/apache/beam/sdk/io/amqp/AmqpIO.java   |  8 ++----
 .../beam/sdk/io/amqp/AmqpMessageCoderTest.java     |  2 +-
 sdks/java/io/azure/build.gradle                    |  1 -
 .../beam/sdk/io/azure/options/AzureModule.java     |  6 -----
 sdks/java/io/bigquery-io-perf-tests/build.gradle   |  2 +-
 .../beam/sdk/bigqueryioperftests/BigQueryIOIT.java |  4 ---
 sdks/java/io/cassandra/build.gradle                |  1 -
 .../apache/beam/sdk/io/cassandra/CassandraIO.java  |  1 -
 .../beam/sdk/io/cassandra/CassandraIOTest.java     |  1 -
 sdks/java/io/clickhouse/build.gradle               |  1 -
 sdks/java/io/common/build.gradle                   |  2 +-
 sdks/java/io/contextualtextio/build.gradle         |  1 -
 .../contextualtextio/ContextualTextIOSource.java   |  4 ---
 sdks/java/io/debezium/build.gradle                 |  1 -
 .../io/debezium/expansion-service/build.gradle     |  1 -
 .../io/debezium/DebeziumTransformRegistrar.java    |  3 ---
 .../apache/beam/io/debezium/SourceRecordJson.java  |  4 +--
 .../apache/beam/io/debezium/DebeziumIOTest.java    |  4 +--
 .../io/debezium/KafkaSourceConsumerFnTest.java     | 23 ++++++++---------
 .../elasticsearch-tests-5/build.gradle             |  1 -
 .../elasticsearch-tests-6/build.gradle             |  1 -
 .../elasticsearch-tests-7/build.gradle             |  1 -
 .../elasticsearch-tests-common/build.gradle        |  1 -
 sdks/java/io/elasticsearch/build.gradle            |  2 +-
 sdks/java/io/expansion-service/build.gradle        |  1 -
 sdks/java/io/file-based-io-tests/build.gradle      |  2 +-
 .../java/org/apache/beam/sdk/io/avro/AvroIOIT.java |  4 ---
 .../apache/beam/sdk/io/parquet/ParquetIOIT.java    |  4 ---
 .../java/org/apache/beam/sdk/io/text/TextIOIT.java |  7 -----
 .../apache/beam/sdk/io/tfrecord/TFRecordIOIT.java  |  4 ---
 .../java/org/apache/beam/sdk/io/xml/XmlIOIT.java   |  4 ---
 sdks/java/io/google-cloud-platform/build.gradle    |  1 -
 .../expansion-service/build.gradle                 |  1 -
 .../sdk/io/gcp/bigquery/BigQueryQuerySource.java   |  4 ---
 .../sdk/io/gcp/bigquery/BigQueryServicesImpl.java  |  3 ---
 .../gcp/bigquery/BigQueryStorageArrowReader.java   |  5 ----
 .../sdk/io/gcp/bigquery/BigQueryTableSource.java   |  3 ---
 .../beam/sdk/io/gcp/bigquery/CreateTables.java     |  6 -----
 .../beam/sdk/io/gcp/bigquery/StorageApiLoads.java  |  3 ---
 .../bigquery/StorageApiWritesShardedRecords.java   |  5 +++-
 .../beam/sdk/io/gcp/bigtable/BigtableIO.java       |  1 -
 .../apache/beam/sdk/io/gcp/healthcare/FhirIO.java  |  1 -
 .../io/gcp/healthcare/HttpHealthcareApiClient.java |  2 +-
 .../sdk/io/gcp/pubsub/PubsubUnboundedSink.java     |  4 ---
 .../beam/sdk/io/gcp/pubsub/TestPubsubSignal.java   |  9 ++-----
 .../sdk/io/gcp/spanner/MutationKeyEncoder.java     |  4 +--
 .../sdk/io/gcp/testing/FakeDatasetService.java     |  1 -
 .../io/gcp/bigquery/BigQueryIOStorageReadTest.java |  3 ---
 .../io/gcp/bigquery/BigQueryNestedRecordsIT.java   |  2 --
 .../io/gcp/bigquery/BigQueryServicesImplTest.java  |  4 ---
 .../sdk/io/gcp/bigquery/BigQueryToTableIT.java     |  5 ++--
 .../beam/sdk/io/gcp/bigtable/BigtableIOTest.java   |  2 --
 .../beam/sdk/io/gcp/datastore/DatastoreV1Test.java |  3 ---
 .../gcp/firestore/BaseFirestoreV1WriteFnTest.java  |  5 +---
 .../beam/sdk/io/gcp/healthcare/FhirIOLROIT.java    |  4 +--
 .../beam/sdk/io/gcp/healthcare/FhirIOTestUtil.java |  3 ---
 .../beam/sdk/io/gcp/pubsub/PubsubReadIT.java       |  3 ---
 .../sdk/io/gcp/spanner/ReadSpannerSchemaTest.java  |  2 --
 .../beam/sdk/io/gcp/spanner/SpannerIOReadTest.java | 30 ++++++++++------------
 .../sdk/io/gcp/spanner/SpannerIOWriteTest.java     |  1 -
 sdks/java/io/hadoop-common/build.gradle            |  2 +-
 sdks/java/io/hadoop-file-system/build.gradle       |  1 -
 sdks/java/io/hadoop-format/build.gradle            |  1 -
 .../sdk/io/hadoop/format/HadoopFormatIOIT.java     |  4 ---
 .../io/hadoop/format/HadoopFormatIOReadTest.java   |  3 ---
 sdks/java/io/hbase/build.gradle                    |  2 +-
 .../java/org/apache/beam/sdk/io/hbase/HBaseIO.java |  2 --
 sdks/java/io/hcatalog/build.gradle                 |  2 +-
 sdks/java/io/influxdb/build.gradle                 |  3 +--
 .../apache/beam/sdk/io/influxdb/InfluxDbIO.java    |  3 ---
 sdks/java/io/jdbc/build.gradle                     |  1 -
 .../java/org/apache/beam/sdk/io/jdbc/JdbcIOIT.java |  4 ---
 .../org/apache/beam/sdk/io/jdbc/JdbcIOTest.java    | 26 ++++++++-----------
 sdks/java/io/jms/build.gradle                      |  1 -
 sdks/java/io/kafka/build.gradle                    |  1 -
 .../beam/sdk/io/kafka/KafkaExactlyOnceSink.java    |  4 ++-
 .../sdk/io/kafka/WatchKafkaTopicPartitionDoFn.java |  5 +++-
 .../beam/sdk/io/kafka/KafkaIOExternalTest.java     |  3 +--
 sdks/java/io/kinesis/build.gradle                  |  2 +-
 .../java/io/kinesis/expansion-service/build.gradle |  1 -
 .../beam/sdk/io/kinesis/KinesisIOReadTest.java     |  2 --
 .../beam/sdk/io/kinesis/serde/AwsModuleTest.java   |  2 --
 sdks/java/io/kudu/build.gradle                     |  1 -
 .../java/org/apache/beam/sdk/io/kudu/KuduIO.java   |  3 ---
 .../java/org/apache/beam/sdk/io/kudu/KuduIOIT.java |  3 ---
 .../org/apache/beam/sdk/io/kudu/KuduIOTest.java    |  1 -
 sdks/java/io/mongodb/build.gradle                  |  2 +-
 .../org/apache/beam/sdk/io/mongodb/MongoDbIO.java  |  1 -
 .../apache/beam/sdk/io/mongodb/MongoDBIOIT.java    |  4 ---
 sdks/java/io/mqtt/build.gradle                     |  2 +-
 sdks/java/io/parquet/build.gradle                  |  1 -
 .../apache/beam/sdk/io/parquet/ParquetIOTest.java  |  1 -
 sdks/java/io/rabbitmq/build.gradle                 |  2 +-
 .../beam/sdk/io/rabbitmq/RabbitMqIOTest.java       |  3 ---
 sdks/java/io/redis/build.gradle                    |  2 +-
 sdks/java/io/snowflake/build.gradle                |  1 -
 .../io/snowflake/expansion-service/build.gradle    |  1 -
 .../apache/beam/sdk/io/snowflake/SnowflakeIO.java  |  3 +--
 .../services/SnowflakeBatchServiceImpl.java        |  3 ---
 .../services/SnowflakeStreamingServiceImpl.java    |  4 +--
 .../test/unit/DataSourceConfigurationTest.java     |  4 ---
 .../test/unit/write/SchemaDispositionTest.java     |  1 -
 .../test/unit/write/StreamingWriteTest.java        |  4 +--
 sdks/java/io/solr/build.gradle                     |  1 -
 sdks/java/io/splunk/build.gradle                   |  2 +-
 .../beam/sdk/io/splunk/SplunkEventWriter.java      |  3 ++-
 sdks/java/io/synthetic/build.gradle                |  1 -
 sdks/java/io/thrift/build.gradle                   |  1 -
 .../apache/beam/sdk/io/thrift/ThriftIOTest.java    |  6 ++---
 sdks/java/io/tika/build.gradle                     |  2 +-
 sdks/java/io/xml/build.gradle                      |  2 +-
 116 files changed, 87 insertions(+), 287 deletions(-)
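
Two suppression patterns also recur in the diff below, for members that only
look unused: fields that are read reflectively while being mapped to JSON
(SourceRecordJson), and suppressions meant to be removed once errorprone
2.11.0 ships (the TODO(BEAM-13271) sites). A minimal sketch of both, with
hypothetical class names:

    // Fields consumed only via reflection during JSON serialization.
    @SuppressWarnings("unused")
    class Event implements java.io.Serializable {
      private final String connector = "mysql";
    }

    // Temporary class-level suppression; see TODO(BEAM-13271) below.
    @SuppressWarnings({"FutureReturnValueIgnored", "unused"})
    class StorageWriter {}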

diff --git a/sdks/java/io/amazon-web-services/build.gradle b/sdks/java/io/amazon-web-services/build.gradle
index 28ce7443..442b2ff 100644
--- a/sdks/java/io/amazon-web-services/build.gradle
+++ b/sdks/java/io/amazon-web-services/build.gradle
@@ -23,7 +23,6 @@ plugins {
 }
 
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.aws',
 )
 
diff --git a/sdks/java/io/amazon-web-services2/build.gradle b/sdks/java/io/amazon-web-services2/build.gradle
index 248a49d..fa0b97c 100644
--- a/sdks/java/io/amazon-web-services2/build.gradle
+++ b/sdks/java/io/amazon-web-services2/build.gradle
@@ -20,7 +20,6 @@ import groovy.json.JsonOutput
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.aws2',
 )
 provideIntegrationTestingDependencies()
diff --git a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/KinesisIO.java b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/KinesisIO.java
index b636544..6c80310 100644
--- a/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/KinesisIO.java
+++ b/sdks/java/io/amazon-web-services2/src/main/java/org/apache/beam/sdk/io/aws2/kinesis/KinesisIO.java
@@ -30,8 +30,6 @@ import org.apache.beam.sdk.values.PCollection;
 import org.checkerframework.checker.nullness.qual.Nullable;
 import org.joda.time.Duration;
 import org.joda.time.Instant;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import software.amazon.awssdk.auth.credentials.AwsBasicCredentials;
 import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
 import software.amazon.awssdk.auth.credentials.StaticCredentialsProvider;
@@ -237,10 +235,6 @@ import software.amazon.kinesis.common.InitialPositionInStream;
 })
 public final class KinesisIO {
 
-  private static final Logger LOG = LoggerFactory.getLogger(KinesisIO.class);
-
-  private static final int DEFAULT_NUM_RETRIES = 6;
-
   /** Returns a new {@link Read} transform for reading from Kinesis. */
   public static Read read() {
     return new AutoValue_KinesisIO_Read.Builder()
diff --git a/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/s3/S3FileSystemTest.java b/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/s3/S3FileSystemTest.java
index 4149e37..2a7814f 100644
--- a/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/s3/S3FileSystemTest.java
+++ b/sdks/java/io/amazon-web-services2/src/test/java/org/apache/beam/sdk/io/aws2/s3/S3FileSystemTest.java
@@ -181,7 +181,7 @@ public class S3FileSystemTest {
     HeadObjectResponse bigHeadObjectResponse =
         headObjectResponse.toBuilder().contentLength(5_368_709_120L).build();
     assertGetObjectHead(
-        s3FileSystem, createObjectHeadRequest(sourcePath, options), options, headObjectResponse);
+        s3FileSystem, createObjectHeadRequest(sourcePath, options), options, bigHeadObjectResponse);
 
     try {
       s3FileSystem.copy(sourcePath, destinationPath);
diff --git a/sdks/java/io/amqp/build.gradle b/sdks/java/io/amqp/build.gradle
index 0b6ca45..29dd7f6 100644
--- a/sdks/java/io/amqp/build.gradle
+++ b/sdks/java/io/amqp/build.gradle
@@ -17,7 +17,7 @@
  */
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, automaticModuleName: 'org.apache.beam.sdk.io.amqp')
+applyJavaNature( automaticModuleName: 'org.apache.beam.sdk.io.amqp')
 
 description = "Apache Beam :: SDKs :: Java :: IO :: AMQP"
 ext.summary = "IO to read and write using AMQP 1.0 protocol (http://www.amqp.org)."
diff --git a/sdks/java/io/amqp/src/main/java/org/apache/beam/sdk/io/amqp/AmqpIO.java b/sdks/java/io/amqp/src/main/java/org/apache/beam/sdk/io/amqp/AmqpIO.java
index c4c5460..6f6effd 100644
--- a/sdks/java/io/amqp/src/main/java/org/apache/beam/sdk/io/amqp/AmqpIO.java
+++ b/sdks/java/io/amqp/src/main/java/org/apache/beam/sdk/io/amqp/AmqpIO.java
@@ -327,19 +327,15 @@ public class AmqpIO {
 
     @Override
     public PDone expand(PCollection<Message> input) {
-      input.apply(ParDo.of(new WriteFn(this)));
+      input.apply(ParDo.of(new WriteFn()));
       return PDone.in(input.getPipeline());
     }
 
     private static class WriteFn extends DoFn<Message, Void> {
 
-      private final Write spec;
-
       private transient Messenger messenger;
 
-      public WriteFn(Write spec) {
-        this.spec = spec;
-      }
+      public WriteFn() {}
 
       @Setup
       public void setup() throws Exception {
diff --git a/sdks/java/io/amqp/src/test/java/org/apache/beam/sdk/io/amqp/AmqpMessageCoderTest.java b/sdks/java/io/amqp/src/test/java/org/apache/beam/sdk/io/amqp/AmqpMessageCoderTest.java
index 6c5c039..548ccce 100644
--- a/sdks/java/io/amqp/src/test/java/org/apache/beam/sdk/io/amqp/AmqpMessageCoderTest.java
+++ b/sdks/java/io/amqp/src/test/java/org/apache/beam/sdk/io/amqp/AmqpMessageCoderTest.java
@@ -63,7 +63,7 @@ public class AmqpMessageCoderTest {
 
     AmqpMessageCoder coder = AmqpMessageCoder.of();
 
-    byte[] encoded = CoderUtils.encodeToByteArray(coder, message);
+    CoderUtils.encodeToByteArray(coder, message);
   }
 
   @Test
diff --git a/sdks/java/io/azure/build.gradle b/sdks/java/io/azure/build.gradle
index a2158e7..8f30a48 100644
--- a/sdks/java/io/azure/build.gradle
+++ b/sdks/java/io/azure/build.gradle
@@ -21,7 +21,6 @@ plugins {
 }
 
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.azure')
 
 description = "Apache Beam :: SDKs :: Java :: IO :: Azure"
diff --git a/sdks/java/io/azure/src/main/java/org/apache/beam/sdk/io/azure/options/AzureModule.java b/sdks/java/io/azure/src/main/java/org/apache/beam/sdk/io/azure/options/AzureModule.java
index ea538cf..d199b6a 100644
--- a/sdks/java/io/azure/src/main/java/org/apache/beam/sdk/io/azure/options/AzureModule.java
+++ b/sdks/java/io/azure/src/main/java/org/apache/beam/sdk/io/azure/options/AzureModule.java
@@ -51,7 +51,6 @@ import com.google.auto.service.AutoService;
 import java.io.IOException;
 import java.lang.reflect.Field;
 import java.util.Map;
-import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet;
 
 /**
  * A Jackson {@link Module} that registers a {@link JsonSerializer} and {@link JsonDeserializer} for
@@ -60,7 +59,6 @@ import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableSet;
 @AutoService(Module.class)
 public class AzureModule extends SimpleModule {
 
-  private static final String TYPE_PROPERTY = "@type";
   private static final String AZURE_CLIENT_ID = "azureClientId";
   private static final String AZURE_TENANT_ID = "azureTenantId";
   private static final String AZURE_CLIENT_SECRET = "azureClientSecret";
@@ -154,10 +152,6 @@ public class AzureModule extends SimpleModule {
   }
 
   private static class TokenCredentialSerializer extends JsonSerializer<TokenCredential> {
-    // These providers are singletons, so don't require any serialization, other than type.
-    // add any singleton credentials...
-    private static final ImmutableSet<Object> SINGLETON_CREDENTIAL_PROVIDERS = ImmutableSet.of();
-
     @Override
     public void serialize(
         TokenCredential tokenCredential,
diff --git a/sdks/java/io/bigquery-io-perf-tests/build.gradle b/sdks/java/io/bigquery-io-perf-tests/build.gradle
index 1c3909d..16df776 100644
--- a/sdks/java/io/bigquery-io-perf-tests/build.gradle
+++ b/sdks/java/io/bigquery-io-perf-tests/build.gradle
@@ -17,7 +17,7 @@
  */
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, exportJavadoc: false, publish: false)
+applyJavaNature( exportJavadoc: false, publish: false)
 provideIntegrationTestingDependencies()
 enableJavaPerformanceTesting()
 
diff --git a/sdks/java/io/bigquery-io-perf-tests/src/test/java/org/apache/beam/sdk/bigqueryioperftests/BigQueryIOIT.java b/sdks/java/io/bigquery-io-perf-tests/src/test/java/org/apache/beam/sdk/bigqueryioperftests/BigQueryIOIT.java
index ce0ec67..cc5dffe 100644
--- a/sdks/java/io/bigquery-io-perf-tests/src/test/java/org/apache/beam/sdk/bigqueryioperftests/BigQueryIOIT.java
+++ b/sdks/java/io/bigquery-io-perf-tests/src/test/java/org/apache/beam/sdk/bigqueryioperftests/BigQueryIOIT.java
@@ -85,8 +85,6 @@ public class BigQueryIOIT {
   private static final String READ_TIME_METRIC_NAME = "read_time";
   private static final String WRITE_TIME_METRIC_NAME = "write_time";
   private static final String AVRO_WRITE_TIME_METRIC_NAME = "avro_write_time";
-  private static String metricsBigQueryTable;
-  private static String metricsBigQueryDataset;
   private static String testBigQueryDataset;
   private static String testBigQueryTable;
   private static SyntheticSourceOptions sourceOptions;
@@ -102,8 +100,6 @@ public class BigQueryIOIT {
     tempRoot = options.getTempRoot();
     sourceOptions =
         SyntheticOptions.fromJsonString(options.getSourceOptions(), SyntheticSourceOptions.class);
-    metricsBigQueryDataset = options.getMetricsBigQueryDataset();
-    metricsBigQueryTable = options.getMetricsBigQueryTable();
     testBigQueryDataset = options.getTestBigQueryDataset();
     testBigQueryTable = options.getTestBigQueryTable();
     writeFormat = WriteFormat.valueOf(options.getWriteFormat());
diff --git a/sdks/java/io/cassandra/build.gradle b/sdks/java/io/cassandra/build.gradle
index 0fbd97d..94b584b 100644
--- a/sdks/java/io/cassandra/build.gradle
+++ b/sdks/java/io/cassandra/build.gradle
@@ -20,7 +20,6 @@ plugins { id 'org.apache.beam.module' }
 
 // Do not relocate guava to avoid issues with Cassandra's version.
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.cassandra',
 )
 provideIntegrationTestingDependencies()
diff --git a/sdks/java/io/cassandra/src/main/java/org/apache/beam/sdk/io/cassandra/CassandraIO.java b/sdks/java/io/cassandra/src/main/java/org/apache/beam/sdk/io/cassandra/CassandraIO.java
index bd919dd..c337834 100644
--- a/sdks/java/io/cassandra/src/main/java/org/apache/beam/sdk/io/cassandra/CassandraIO.java
+++ b/sdks/java/io/cassandra/src/main/java/org/apache/beam/sdk/io/cassandra/CassandraIO.java
@@ -411,7 +411,6 @@ public class CassandraIO {
           @Element CassandraIO.Read<T> read, OutputReceiver<Read<T>> outputReceiver) {
         Set<RingRange> ringRanges = getRingRanges(read);
         for (RingRange rr : ringRanges) {
-          Set<RingRange> subset = ImmutableSet.<RingRange>of(rr);
           outputReceiver.output(read.withRingRanges(ImmutableSet.of(rr)));
         }
       }
diff --git a/sdks/java/io/cassandra/src/test/java/org/apache/beam/sdk/io/cassandra/CassandraIOTest.java b/sdks/java/io/cassandra/src/test/java/org/apache/beam/sdk/io/cassandra/CassandraIOTest.java
index eb61c3a..83f28ed 100644
--- a/sdks/java/io/cassandra/src/test/java/org/apache/beam/sdk/io/cassandra/CassandraIOTest.java
+++ b/sdks/java/io/cassandra/src/test/java/org/apache/beam/sdk/io/cassandra/CassandraIOTest.java
@@ -98,7 +98,6 @@ public class CassandraIOTest implements Serializable {
   private static final String CASSANDRA_TABLE = "scientist";
   private static final Logger LOG = LoggerFactory.getLogger(CassandraIOTest.class);
   private static final String STORAGE_SERVICE_MBEAN = "org.apache.cassandra.db:type=StorageService";
-  private static final float ACCEPTABLE_EMPTY_SPLITS_PERCENTAGE = 0.5f;
   private static final int FLUSH_TIMEOUT = 30000;
   private static final int JMX_CONF_TIMEOUT = 1000;
   private static int jmxPort;
diff --git a/sdks/java/io/clickhouse/build.gradle b/sdks/java/io/clickhouse/build.gradle
index 1544714..3fd2abf 100644
--- a/sdks/java/io/clickhouse/build.gradle
+++ b/sdks/java/io/clickhouse/build.gradle
@@ -21,7 +21,6 @@ plugins {
   id 'ca.coglinc.javacc'
 }
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.clickhouse',
   generatedClassPatterns: [
     /^org\.apache\.beam\.sdk\.io\.clickhouse\.impl\.parser\..*/,
diff --git a/sdks/java/io/common/build.gradle b/sdks/java/io/common/build.gradle
index 96bfadc..a9d611b 100644
--- a/sdks/java/io/common/build.gradle
+++ b/sdks/java/io/common/build.gradle
@@ -17,7 +17,7 @@
  */
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, exportJavadoc: false, automaticModuleName: 'org.apache.beam.sdk.io.common')
+applyJavaNature( exportJavadoc: false, automaticModuleName: 'org.apache.beam.sdk.io.common')
 
 description = "Apache Beam :: SDKs :: Java :: IO :: Common"
 ext.summary = "Code used by all Beam IOs"
diff --git a/sdks/java/io/contextualtextio/build.gradle b/sdks/java/io/contextualtextio/build.gradle
index e507522..340e994 100644
--- a/sdks/java/io/contextualtextio/build.gradle
+++ b/sdks/java/io/contextualtextio/build.gradle
@@ -18,7 +18,6 @@
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
         automaticModuleName: 'org.apache.beam.sdk.io.contextualtextio',
 )
 
diff --git a/sdks/java/io/contextualtextio/src/main/java/org/apache/beam/sdk/io/contextualtextio/ContextualTextIOSource.java b/sdks/java/io/contextualtextio/src/main/java/org/apache/beam/sdk/io/contextualtextio/ContextualTextIOSource.java
index 2a95120..82ce3ac 100644
--- a/sdks/java/io/contextualtextio/src/main/java/org/apache/beam/sdk/io/contextualtextio/ContextualTextIOSource.java
+++ b/sdks/java/io/contextualtextio/src/main/java/org/apache/beam/sdk/io/contextualtextio/ContextualTextIOSource.java
@@ -34,8 +34,6 @@ import org.apache.beam.vendor.grpc.v1p36p0.com.google.protobuf.ByteString;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions;
 import org.checkerframework.checker.nullness.qual.Nullable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * Implementation detail of {@link ContextualTextIO.Read}.
@@ -57,8 +55,6 @@ import org.slf4j.LoggerFactory;
 class ContextualTextIOSource extends FileBasedSource<Row> {
   byte[] delimiter;
 
-  private static final Logger LOG = LoggerFactory.getLogger(ContextualTextIOSource.class);
-
   // Used to Override isSplittable
   private boolean hasMultilineCSVRecords;
 
diff --git a/sdks/java/io/debezium/build.gradle b/sdks/java/io/debezium/build.gradle
index 9c7fc2b..29964fc 100644
--- a/sdks/java/io/debezium/build.gradle
+++ b/sdks/java/io/debezium/build.gradle
@@ -19,7 +19,6 @@ import groovy.json.JsonOutput
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
         automaticModuleName: 'org.apache.beam.sdk.io.debezium',
         mavenRepositories: [
                 [id: 'io.confluent', url: 'https://packages.confluent.io/maven/']
diff --git a/sdks/java/io/debezium/expansion-service/build.gradle b/sdks/java/io/debezium/expansion-service/build.gradle
index 85b8c1f..a183c91 100644
--- a/sdks/java/io/debezium/expansion-service/build.gradle
+++ b/sdks/java/io/debezium/expansion-service/build.gradle
@@ -21,7 +21,6 @@ apply plugin: 'application'
 mainClassName = "org.apache.beam.sdk.expansion.service.ExpansionService"
 
 applyJavaNature(
-  suppressUnusedVariable: true,
         automaticModuleName: 'org.apache.beam.sdk.io.debezium.expansion.service',
         exportJavadoc: false,
         validateShadowJar: false,
diff --git a/sdks/java/io/debezium/src/main/java/org/apache/beam/io/debezium/DebeziumTransformRegistrar.java b/sdks/java/io/debezium/src/main/java/org/apache/beam/io/debezium/DebeziumTransformRegistrar.java
index 0eae9c9..13a099a 100644
--- a/sdks/java/io/debezium/src/main/java/org/apache/beam/io/debezium/DebeziumTransformRegistrar.java
+++ b/sdks/java/io/debezium/src/main/java/org/apache/beam/io/debezium/DebeziumTransformRegistrar.java
@@ -28,8 +28,6 @@ import org.apache.beam.sdk.values.PBegin;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.collect.ImmutableMap;
 import org.checkerframework.checker.nullness.qual.Nullable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /** Exposes {@link DebeziumIO.Read} as an external transform for cross-language usage. */
 @Experimental(Experimental.Kind.PORTABILITY)
@@ -38,7 +36,6 @@ import org.slf4j.LoggerFactory;
   "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
 })
 public class DebeziumTransformRegistrar implements ExternalTransformRegistrar {
-  private static final Logger LOG = LoggerFactory.getLogger(DebeziumTransformRegistrar.class);
   public static final String READ_JSON_URN = "beam:transform:org.apache.beam:debezium_read:v1";
 
   @Override
diff --git a/sdks/java/io/debezium/src/main/java/org/apache/beam/io/debezium/SourceRecordJson.java b/sdks/java/io/debezium/src/main/java/org/apache/beam/io/debezium/SourceRecordJson.java
index e61884f..44460a5 100644
--- a/sdks/java/io/debezium/src/main/java/org/apache/beam/io/debezium/SourceRecordJson.java
+++ b/sdks/java/io/debezium/src/main/java/org/apache/beam/io/debezium/SourceRecordJson.java
@@ -61,7 +61,6 @@ import org.checkerframework.checker.nullness.qual.Nullable;
  */
 @SuppressWarnings({"nullness"})
 public class SourceRecordJson {
-  private final @Nullable SourceRecord sourceRecord;
   private final @Nullable Struct value;
   private final @Nullable Event event;
 
@@ -75,7 +74,6 @@ public class SourceRecordJson {
       throw new IllegalArgumentException();
     }
 
-    this.sourceRecord = sourceRecord;
     this.value = (Struct) sourceRecord.value();
 
     if (this.value == null) {
@@ -190,6 +188,7 @@ public class SourceRecordJson {
   }
 
   /** Depicts a SourceRecord as an Event in order for it to be mapped as JSON. */
+  @SuppressWarnings("unused") // Unused params are validated as JSON
   static class Event implements Serializable {
     private final SourceRecordJson.Event.Metadata metadata;
     private final SourceRecordJson.Event.Before before;
@@ -222,6 +221,7 @@ public class SourceRecordJson {
     }
 
     /** Depicts the metadata within a SourceRecord. It has valuable fields. */
+    @SuppressWarnings("unused") // Unused params are validated as JSON
     static class Metadata implements Serializable {
       private final @Nullable String connector;
       private final @Nullable String version;
diff --git a/sdks/java/io/debezium/src/test/java/org/apache/beam/io/debezium/DebeziumIOTest.java b/sdks/java/io/debezium/src/test/java/org/apache/beam/io/debezium/DebeziumIOTest.java
index ccf57b6..12d1d61 100644
--- a/sdks/java/io/debezium/src/test/java/org/apache/beam/io/debezium/DebeziumIOTest.java
+++ b/sdks/java/io/debezium/src/test/java/org/apache/beam/io/debezium/DebeziumIOTest.java
@@ -30,13 +30,11 @@ import org.apache.kafka.common.config.ConfigValue;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /** Test on the DebeziumIO. */
 @RunWith(JUnit4.class)
 public class DebeziumIOTest implements Serializable {
-  private static final Logger LOG = LoggerFactory.getLogger(DebeziumIOTest.class);
+
   private static final ConnectorConfiguration MYSQL_CONNECTOR_CONFIGURATION =
       ConnectorConfiguration.create()
           .withUsername("debezium")
diff --git a/sdks/java/io/debezium/src/test/java/org/apache/beam/io/debezium/KafkaSourceConsumerFnTest.java b/sdks/java/io/debezium/src/test/java/org/apache/beam/io/debezium/KafkaSourceConsumerFnTest.java
index c22f8a3..85ca71c 100644
--- a/sdks/java/io/debezium/src/test/java/org/apache/beam/io/debezium/KafkaSourceConsumerFnTest.java
+++ b/sdks/java/io/debezium/src/test/java/org/apache/beam/io/debezium/KafkaSourceConsumerFnTest.java
@@ -87,18 +87,17 @@ public class KafkaSourceConsumerFnTest implements Serializable {
 
     Pipeline pipeline = Pipeline.create();
 
-    PCollection<Integer> counts =
-        pipeline
-            .apply(
-                Create.of(Lists.newArrayList(config))
-                    .withCoder(MapCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of())))
-            .apply(
-                ParDo.of(
-                    new KafkaSourceConsumerFn<>(
-                        CounterSourceConnector.class,
-                        sourceRecord -> (Integer) sourceRecord.value(),
-                        1)))
-            .setCoder(VarIntCoder.of());
+    pipeline
+        .apply(
+            Create.of(Lists.newArrayList(config))
+                .withCoder(MapCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of())))
+        .apply(
+            ParDo.of(
+                new KafkaSourceConsumerFn<>(
+                    CounterSourceConnector.class,
+                    sourceRecord -> (Integer) sourceRecord.value(),
+                    1)))
+        .setCoder(VarIntCoder.of());
 
     pipeline.run().waitUntilFinish();
     Assert.assertEquals(3, CounterTask.getCountTasks());
diff --git a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-5/build.gradle b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-5/build.gradle
index 01e1750..6bccb6c 100644
--- a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-5/build.gradle
+++ b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-5/build.gradle
@@ -18,7 +18,6 @@
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
     publish: false,
     archivesBaseName: 'beam-sdks-java-io-elasticsearch-tests-5'
 )
diff --git a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-6/build.gradle b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-6/build.gradle
index 0f7115f..c4a97c4 100644
--- a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-6/build.gradle
+++ b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-6/build.gradle
@@ -18,7 +18,6 @@
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
     publish: false,
     archivesBaseName: 'beam-sdks-java-io-elasticsearch-tests-6'
 )
diff --git a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-7/build.gradle b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-7/build.gradle
index 513adfc..4d40504 100644
--- a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-7/build.gradle
+++ b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-7/build.gradle
@@ -18,7 +18,6 @@
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
     publish: false,
     archivesBaseName: 'beam-sdks-java-io-elasticsearch-tests-7'
 )
diff --git a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-common/build.gradle b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-common/build.gradle
index 6718d6c..6f470f2 100644
--- a/sdks/java/io/elasticsearch-tests/elasticsearch-tests-common/build.gradle
+++ b/sdks/java/io/elasticsearch-tests/elasticsearch-tests-common/build.gradle
@@ -18,7 +18,6 @@
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
     publish: false,
     archivesBaseName: 'beam-sdks-java-io-elasticsearch-tests-common'
 )
diff --git a/sdks/java/io/elasticsearch/build.gradle b/sdks/java/io/elasticsearch/build.gradle
index b7cf19f..c3bacdb 100644
--- a/sdks/java/io/elasticsearch/build.gradle
+++ b/sdks/java/io/elasticsearch/build.gradle
@@ -17,7 +17,7 @@
  */
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, automaticModuleName: 'org.apache.beam.sdk.io.elasticsearch')
+applyJavaNature( automaticModuleName: 'org.apache.beam.sdk.io.elasticsearch')
 
 description = "Apache Beam :: SDKs :: Java :: IO :: Elasticsearch"
 ext.summary = "IO to read and write on Elasticsearch"
diff --git a/sdks/java/io/expansion-service/build.gradle b/sdks/java/io/expansion-service/build.gradle
index 529ccef..c9aab1c 100644
--- a/sdks/java/io/expansion-service/build.gradle
+++ b/sdks/java/io/expansion-service/build.gradle
@@ -21,7 +21,6 @@ apply plugin: 'application'
 mainClassName = "org.apache.beam.sdk.expansion.service.ExpansionService"
 
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.expansion.service',
   exportJavadoc: false,
   validateShadowJar: false,
diff --git a/sdks/java/io/file-based-io-tests/build.gradle b/sdks/java/io/file-based-io-tests/build.gradle
index 6b5924f..0c46d80 100644
--- a/sdks/java/io/file-based-io-tests/build.gradle
+++ b/sdks/java/io/file-based-io-tests/build.gradle
@@ -17,7 +17,7 @@
  */
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, exportJavadoc: false, publish: false)
+applyJavaNature( exportJavadoc: false, publish: false)
 provideIntegrationTestingDependencies()
 enableJavaPerformanceTesting()
 
diff --git a/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/avro/AvroIOIT.java b/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/avro/AvroIOIT.java
index 8789e35..f9f6cb8 100644
--- a/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/avro/AvroIOIT.java
+++ b/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/avro/AvroIOIT.java
@@ -93,8 +93,6 @@ public class AvroIOIT {
                   + "}");
 
   private static String filenamePrefix;
-  private static String bigQueryDataset;
-  private static String bigQueryTable;
   private static final String AVRO_NAMESPACE = AvroIOIT.class.getName();
   private static Integer numberOfTextLines;
   private static Integer datasetSize;
@@ -108,8 +106,6 @@ public class AvroIOIT {
     FileBasedIOTestPipelineOptions options = readFileBasedIOITPipelineOptions();
 
     filenamePrefix = appendTimestampSuffix(options.getFilenamePrefix());
-    bigQueryDataset = options.getBigQueryDataset();
-    bigQueryTable = options.getBigQueryTable();
     datasetSize = options.getDatasetSize();
     expectedHash = options.getExpectedHash();
     numberOfTextLines = options.getNumberOfRecords();
diff --git a/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/parquet/ParquetIOIT.java b/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/parquet/ParquetIOIT.java
index 600652c..8fb39b4 100644
--- a/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/parquet/ParquetIOIT.java
+++ b/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/parquet/ParquetIOIT.java
@@ -93,8 +93,6 @@ public class ParquetIOIT {
                   + "}");
 
   private static String filenamePrefix;
-  private static String bigQueryDataset;
-  private static String bigQueryTable;
   private static Integer numberOfTextLines;
   private static Integer datasetSize;
   private static String expectedHash;
@@ -110,8 +108,6 @@ public class ParquetIOIT {
     datasetSize = options.getDatasetSize();
     expectedHash = options.getExpectedHash();
     filenamePrefix = appendTimestampSuffix(options.getFilenamePrefix());
-    bigQueryDataset = options.getBigQueryDataset();
-    bigQueryTable = options.getBigQueryTable();
     settings =
         InfluxDBSettings.builder()
             .withHost(options.getInfluxHost())
diff --git a/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/text/TextIOIT.java b/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/text/TextIOIT.java
index 8fe65dd..5b3a4f5 100644
--- a/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/text/TextIOIT.java
+++ b/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/text/TextIOIT.java
@@ -52,8 +52,6 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * Integration tests for {@link org.apache.beam.sdk.io.TextIO}.
@@ -78,7 +76,6 @@ import org.slf4j.LoggerFactory;
  */
 @RunWith(JUnit4.class)
 public class TextIOIT {
-  private static final Logger LOG = LoggerFactory.getLogger(TextIOIT.class);
 
   private static String filenamePrefix;
   private static Integer numberOfTextLines;
@@ -86,8 +83,6 @@ public class TextIOIT {
   private static String expectedHash;
   private static Compression compressionType;
   private static Integer numShards;
-  private static String bigQueryDataset;
-  private static String bigQueryTable;
   private static boolean gatherGcsPerformanceMetrics;
   private static InfluxDBSettings settings;
   private static final String FILEIOIT_NAMESPACE = TextIOIT.class.getName();
@@ -103,8 +98,6 @@ public class TextIOIT {
     compressionType = Compression.valueOf(options.getCompressionType());
     filenamePrefix = appendTimestampSuffix(options.getFilenamePrefix());
     numShards = options.getNumberOfShards();
-    bigQueryDataset = options.getBigQueryDataset();
-    bigQueryTable = options.getBigQueryTable();
     gatherGcsPerformanceMetrics = options.getReportGcsPerformanceMetrics();
     settings =
         InfluxDBSettings.builder()
diff --git a/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/tfrecord/TFRecordIOIT.java b/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/tfrecord/TFRecordIOIT.java
index 09cb467..38e4c8c 100644
--- a/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/tfrecord/TFRecordIOIT.java
+++ b/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/tfrecord/TFRecordIOIT.java
@@ -88,8 +88,6 @@ public class TFRecordIOIT {
   private static final String RUN_TIME = "run_time";
 
   private static String filenamePrefix;
-  private static String bigQueryDataset;
-  private static String bigQueryTable;
   private static Integer numberOfTextLines;
   private static Integer datasetSize;
   private static String expectedHash;
@@ -108,8 +106,6 @@ public class TFRecordIOIT {
     numberOfTextLines = options.getNumberOfRecords();
     compressionType = Compression.valueOf(options.getCompressionType());
     filenamePrefix = appendTimestampSuffix(options.getFilenamePrefix());
-    bigQueryDataset = options.getBigQueryDataset();
-    bigQueryTable = options.getBigQueryTable();
     settings =
         InfluxDBSettings.builder()
             .withHost(options.getInfluxHost())
diff --git a/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/xml/XmlIOIT.java b/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/xml/XmlIOIT.java
index 7b5f401..bd4d254 100644
--- a/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/xml/XmlIOIT.java
+++ b/sdks/java/io/file-based-io-tests/src/test/java/org/apache/beam/sdk/io/xml/XmlIOIT.java
@@ -93,8 +93,6 @@ public class XmlIOIT {
   }
 
   private static String filenamePrefix;
-  private static String bigQueryDataset;
-  private static String bigQueryTable;
   private static Integer numberOfTextLines;
   private static Integer datasetSize;
   private static String expectedHash;
@@ -111,8 +109,6 @@ public class XmlIOIT {
     XmlIOITPipelineOptions options = readIOTestPipelineOptions(XmlIOITPipelineOptions.class);
     filenamePrefix = appendTimestampSuffix(options.getFilenamePrefix());
     charset = Charset.forName(options.getCharset());
-    bigQueryDataset = options.getBigQueryDataset();
-    bigQueryTable = options.getBigQueryTable();
     datasetSize = options.getDatasetSize();
     expectedHash = options.getExpectedHash();
     numberOfTextLines = options.getNumberOfRecords();
diff --git a/sdks/java/io/google-cloud-platform/build.gradle b/sdks/java/io/google-cloud-platform/build.gradle
index 51f5f7f..d16fb16 100644
--- a/sdks/java/io/google-cloud-platform/build.gradle
+++ b/sdks/java/io/google-cloud-platform/build.gradle
@@ -20,7 +20,6 @@ import groovy.json.JsonOutput
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.gcp',
   enableSpotbugs: false,
   classesTriggerCheckerBugs: [
diff --git a/sdks/java/io/google-cloud-platform/expansion-service/build.gradle b/sdks/java/io/google-cloud-platform/expansion-service/build.gradle
index 0052d42..dea464b 100644
--- a/sdks/java/io/google-cloud-platform/expansion-service/build.gradle
+++ b/sdks/java/io/google-cloud-platform/expansion-service/build.gradle
@@ -21,7 +21,6 @@ apply plugin: 'application'
 mainClassName = "org.apache.beam.sdk.expansion.service.ExpansionService"
 
 applyJavaNature(
-  suppressUnusedVariable: true,
         automaticModuleName: 'org.apache.beam.sdk.io.gcp.expansion.service',
         exportJavadoc: false,
         validateShadowJar: false,
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryQuerySource.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryQuerySource.java
index 40559f6..4ca2c64 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryQuerySource.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryQuerySource.java
@@ -24,15 +24,11 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.SerializableFunction;
 import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /** A {@link BigQuerySourceBase} for querying BigQuery tables. */
 @VisibleForTesting
 class BigQueryQuerySource<T> extends BigQuerySourceBase<T> {
 
-  private static final Logger LOG = LoggerFactory.getLogger(BigQueryQuerySource.class);
-
   static <T> BigQueryQuerySource<T> create(
       String stepUuid,
       BigQueryQuerySourceDef queryDef,
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java
index a62b6fc..5a270d4 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImpl.java
@@ -145,9 +145,6 @@ import org.slf4j.LoggerFactory;
 class BigQueryServicesImpl implements BigQueryServices {
   private static final Logger LOG = LoggerFactory.getLogger(BigQueryServicesImpl.class);
 
-  // How frequently to log while polling.
-  private static final Duration POLLING_LOG_GAP = Duration.standardMinutes(10);
-
   // The maximum number of retries to execute a BigQuery RPC.
   private static final int MAX_RPC_RETRIES = 9;
 
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageArrowReader.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageArrowReader.java
index 323b69c..92d23bf 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageArrowReader.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryStorageArrowReader.java
@@ -34,7 +34,6 @@ import org.apache.beam.sdk.values.Row;
 @SuppressWarnings("nullness")
 class BigQueryStorageArrowReader implements BigQueryStorageReader {
 
-  private org.apache.beam.sdk.schemas.Schema arrowBeamSchema;
   private @Nullable RecordBatchRowIterator recordBatchIterator;
   private long rowCount;
   private ArrowSchema protoSchema;
@@ -42,10 +41,6 @@ class BigQueryStorageArrowReader implements BigQueryStorageReader {
 
   BigQueryStorageArrowReader(ReadSession readSession) throws IOException {
     protoSchema = readSession.getArrowSchema();
-    InputStream input = protoSchema.getSerializedSchema().newInput();
-    this.arrowBeamSchema =
-        ArrowConversion.ArrowSchemaTranslator.toBeamSchema(
-            ArrowConversion.arrowSchemaFromInput(input));
     this.rowCount = 0;
     this.alloc = null;
   }
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableSource.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableSource.java
index e2e2bb4..86aa87e 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableSource.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryTableSource.java
@@ -26,8 +26,6 @@ import org.apache.beam.sdk.options.PipelineOptions;
 import org.apache.beam.sdk.transforms.SerializableFunction;
 import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /** A {@link BigQuerySourceBase} for reading BigQuery tables. */
 @VisibleForTesting
@@ -35,7 +33,6 @@ import org.slf4j.LoggerFactory;
   "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
 })
 class BigQueryTableSource<T> extends BigQuerySourceBase<T> {
-  private static final Logger LOG = LoggerFactory.getLogger(BigQueryTableSource.class);
 
   static <T> BigQueryTableSource<T> create(
       String stepUuid,
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/CreateTables.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/CreateTables.java
index d9bb6dd..8eef067 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/CreateTables.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/CreateTables.java
@@ -20,11 +20,8 @@ package org.apache.beam.sdk.io.gcp.bigquery;
 import static org.apache.beam.vendor.guava.v26_0_jre.com.google.common.base.Preconditions.checkArgument;
 
 import com.google.api.services.bigquery.model.TableSchema;
-import java.util.Collections;
 import java.util.List;
 import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
 import org.apache.beam.sdk.io.gcp.bigquery.BigQueryIO.Write.CreateDisposition;
 import org.apache.beam.sdk.transforms.DoFn;
 import org.apache.beam.sdk.transforms.PTransform;
@@ -57,9 +54,6 @@ public class CreateTables<DestinationT, ElementT>
    *
    * <p>TODO: We should put a bound on memory usage of this. Use guava cache instead.
    */
-  private static Set<String> createdTables =
-      Collections.newSetFromMap(new ConcurrentHashMap<String, Boolean>());
-
   public CreateTables(
       CreateDisposition createDisposition,
       DynamicDestinations<?, DestinationT> dynamicDestinations) {
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiLoads.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiLoads.java
index 2d079e3..8d12556 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiLoads.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiLoads.java
@@ -38,13 +38,10 @@ import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.TupleTag;
 import org.apache.beam.sdk.values.TypeDescriptor;
 import org.joda.time.Duration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /** This {@link PTransform} manages loads into BigQuery using the Storage API. */
 public class StorageApiLoads<DestinationT, ElementT>
     extends PTransform<PCollection<KV<DestinationT, ElementT>>, WriteResult> {
-  private static final Logger LOG = LoggerFactory.getLogger(StorageApiLoads.class);
   static final int MAX_BATCH_SIZE_BYTES = 2 * 1024 * 1024;
 
   private final Coder<DestinationT> destinationCoder;
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWritesShardedRecords.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWritesShardedRecords.java
index aacab97..fde0085 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWritesShardedRecords.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigquery/StorageApiWritesShardedRecords.java
@@ -84,7 +84,10 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /** A transform to write sharded records to BigQuery using the Storage API. */
-@SuppressWarnings("FutureReturnValueIgnored")
+@SuppressWarnings({
+  "FutureReturnValueIgnored",
+  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is 
released (2.11.0)
+})
 public class StorageApiWritesShardedRecords<DestinationT, ElementT>
     extends PTransform<
         PCollection<KV<ShardedKey<DestinationT>, Iterable<byte[]>>>, PCollection<Void>> {
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
index f9df078..8fbadf2 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIO.java
@@ -912,7 +912,6 @@ public class BigtableIO {
     private final BigtableConfig config;
     private final BigtableReadOptions readOptions;
     private @Nullable Long estimatedSizeBytes;
-    private transient @Nullable List<SampleRowKeysResponse> sampleRowKeys;
 
     /** Creates a new {@link BigtableSource} with just one {@link ByteKeyRange}. */
     protected BigtableSource withSingleRange(ByteKeyRange range) {
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIO.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIO.java
index 7e7d351..f36d943 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIO.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIO.java
@@ -924,7 +924,6 @@ public class FhirIO {
 
     @Override
     public Result expand(PCollection<String> input) {
-      PCollectionTuple bundles;
       switch (this.getWriteMethod()) {
         case IMPORT:
           LOG.warn(
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HttpHealthcareApiClient.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HttpHealthcareApiClient.java
index 13dfe28..1b09ff4 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HttpHealthcareApiClient.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/healthcare/HttpHealthcareApiClient.java
@@ -250,7 +250,7 @@ public class HttpHealthcareApiClient implements HealthcareApiClient, Serializable
             .setEntity(requestEntity)
             .addHeader("Content-Type", "application/dicom")
             .build();
-    HttpResponse response = httpClient.execute(request);
+    httpClient.execute(request);
     return new Empty();
   }
 
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSink.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSink.java
index 038dc6f..fa182d2 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSink.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubUnboundedSink.java
@@ -24,7 +24,6 @@ import java.io.InputStream;
 import java.io.OutputStream;
 import java.util.ArrayList;
 import java.util.List;
-import java.util.Map;
 import java.util.UUID;
 import java.util.concurrent.ThreadLocalRandom;
 import org.apache.beam.sdk.coders.AtomicCoder;
@@ -33,7 +32,6 @@ import org.apache.beam.sdk.coders.ByteArrayCoder;
 import org.apache.beam.sdk.coders.Coder;
 import org.apache.beam.sdk.coders.CoderException;
 import org.apache.beam.sdk.coders.KvCoder;
-import org.apache.beam.sdk.coders.MapCoder;
 import org.apache.beam.sdk.coders.NullableCoder;
 import org.apache.beam.sdk.coders.StringUtf8Coder;
 import org.apache.beam.sdk.coders.VarIntCoder;
@@ -100,8 +98,6 @@ public class PubsubUnboundedSink extends PTransform<PCollection<PubsubMessage>,
   private static class OutgoingMessageCoder extends AtomicCoder<OutgoingMessage> {
     private static final NullableCoder<String> RECORD_ID_CODER =
         NullableCoder.of(StringUtf8Coder.of());
-    private static final NullableCoder<Map<String, String>> ATTRIBUTES_CODER =
-        NullableCoder.of(MapCoder.of(StringUtf8Coder.of(), StringUtf8Coder.of()));
 
     @Override
     public void encode(OutgoingMessage value, OutputStream outStream)
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/TestPubsubSignal.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/TestPubsubSignal.java
index 12c879b..27567a4 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/TestPubsubSignal.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/pubsub/TestPubsubSignal.java
@@ -74,7 +74,8 @@ import org.slf4j.LoggerFactory;
  * <p>Uses a random temporary Pubsub topic for synchronization.
  */
 @SuppressWarnings({
-  "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
+  "nullness", // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
+  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is 
released (2.11.0)
 })
 public class TestPubsubSignal implements TestRule {
   private static final Logger LOG = LoggerFactory.getLogger(TestPubsubSignal.class);
@@ -82,10 +83,6 @@ public class TestPubsubSignal implements TestRule {
   private static final String RESULT_SUCCESS_MESSAGE = "SUCCESS";
   private static final String START_TOPIC_NAME = "start";
   private static final String START_SIGNAL_MESSAGE = "START SIGNAL";
-  private static final Integer DEFAULT_ACK_DEADLINE_SECONDS = 60;
-
-  private static final String NO_ID_ATTRIBUTE = null;
-  private static final String NO_TIMESTAMP_ATTRIBUTE = null;
 
   private final TestPubsubOptions pipelineOptions;
   private final String pubsubEndpoint;
@@ -404,7 +401,6 @@ public class TestPubsubSignal implements TestRule {
    * "FAILURE".
    */
   static class StatefulPredicateCheck<T> extends DoFn<KV<String, ? extends T>, String> {
-    private final SerializableFunction<T, String> formatter;
     private SerializableFunction<Set<T>, Boolean> successPredicate;
     // keep all events seen so far in the state cell
 
@@ -418,7 +414,6 @@ public class TestPubsubSignal implements TestRule {
         SerializableFunction<T, String> formatter,
         SerializableFunction<Set<T>, Boolean> successPredicate) {
       this.seenEvents = StateSpecs.bag(coder);
-      this.formatter = formatter;
       this.successPredicate = successPredicate;
     }
 
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/MutationKeyEncoder.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/MutationKeyEncoder.java
index 77d84e9..5e68d55 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/MutationKeyEncoder.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/spanner/MutationKeyEncoder.java
@@ -39,15 +39,13 @@ import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting;
 import org.joda.time.DateTime;
 import org.joda.time.Days;
 import org.joda.time.MutableDateTime;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * Given the Schema, Encodes the table name and Key into a lexicographically sortable {@code
  * byte[]}.
  */
 class MutationKeyEncoder {
-  private static final Logger LOG = LoggerFactory.getLogger(MutationKeyEncoder.class);
+
   private static final int ROWS_PER_UNKNOWN_TABLE_LOG_MESSAGE = 10000;
   private static final DateTime MIN_DATE = new DateTime(1, 1, 1, 0, 0);
   private final SpannerSchema schema;
diff --git a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/testing/FakeDatasetService.java b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/testing/FakeDatasetService.java
index bb3ef9f..239c2ad 100644
--- a/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/testing/FakeDatasetService.java
+++ b/sdks/java/io/google-cloud-platform/src/main/java/org/apache/beam/sdk/io/gcp/testing/FakeDatasetService.java
@@ -188,7 +188,6 @@ public class FakeDatasetService implements DatasetService, 
Serializable {
   public List<TableRow> getAllRows(String projectId, String datasetId, String 
tableId)
       throws InterruptedException, IOException {
     synchronized (tables) {
-      TableContainer tableContainer = getTableContainer(projectId, datasetId, 
tableId);
       return getTableContainer(projectId, datasetId, tableId).getRows();
     }
   }
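
Aside: this is the simplest class of fix in the commit: a local that merely duplicated the following call is dropped and the call is made directly. A self-contained before/after sketch (hypothetical names, plain JDK):

    import java.util.Arrays;
    import java.util.List;

    class Example {
      // Stand-in for getTableContainer(...): yields the rows of a container.
      private List<String> rowsOf(String table) {
        return Arrays.asList(table + ":r1", table + ":r2");
      }

      List<String> getAllRows(String table) {
        // Before: List<String> unusedLocal = rowsOf(table); (never read)
        // After: return the call's result directly.
        return rowsOf(table);
      }
    }
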
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadTest.java
index 135a536..fcf335c 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryIOStorageReadTest.java
@@ -842,7 +842,6 @@ public class BigQueryIOStorageReadTest {
             TableRowJsonCoder.of(),
             new FakeBigQueryServices().withStorageClient(fakeStorageClient));
 
-    List<TableRow> rows = new ArrayList<>();
     BoundedReader<TableRow> reader = streamSource.createReader(options);
 
     // Before call to BoundedReader#start, fraction consumed must be zero.
@@ -928,7 +927,6 @@ public class BigQueryIOStorageReadTest {
             TableRowJsonCoder.of(),
             new FakeBigQueryServices().withStorageClient(fakeStorageClient));
 
-    List<TableRow> rows = new ArrayList<>();
     BoundedReader<TableRow> reader = streamSource.createReader(options);
 
     // Before call to BoundedReader#start, fraction consumed must be zero.
@@ -1618,7 +1616,6 @@ public class BigQueryIOStorageReadTest {
             TableRowJsonCoder.of(),
             new FakeBigQueryServices().withStorageClient(fakeStorageClient));
 
-    List<TableRow> rows = new ArrayList<>();
     BoundedReader<TableRow> reader = streamSource.createReader(options);
 
     // Before call to BoundedReader#start, fraction consumed must be zero.
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryNestedRecordsIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryNestedRecordsIT.java
index 872faa2..698ef66 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryNestedRecordsIT.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryNestedRecordsIT.java
@@ -41,8 +41,6 @@ public class BigQueryNestedRecordsIT {
   private static final String UNFLATTENABLE_QUERY =
       "SELECT * FROM [apache-beam-testing:big_query_nested_test.genomics_2]";
 
-  private static Integer stringifyCount = 0;
-
   /** Options supported by this class. */
   public interface Options extends PipelineOptions {
 
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImplTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImplTest.java
index 16c40cf..4ea3d97 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImplTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryServicesImplTest.java
@@ -1043,8 +1043,6 @@ public class BigQueryServicesImplTest {
     List<FailsafeValueInSingleWindow<TableRow, TableRow>> rows = new ArrayList<>();
     rows.add(wrapValue(new TableRow()));
 
-    final TableDataInsertAllResponse allRowsSucceeded =
-        new TableDataInsertAllResponse().setInsertErrors(ImmutableList.of());
     // First response is 403 non-{rate-limited, quota-exceeded}, second response has valid payload
     // but should not be invoked.
     setupMockResponses(
@@ -1318,8 +1316,6 @@ public class BigQueryServicesImplTest {
 
   @Test
   public void testGetErrorInfo() throws IOException {
-    DatasetServiceImpl dataService =
-        new DatasetServiceImpl(bigquery, null, PipelineOptionsFactory.create());
     ErrorInfo info = new ErrorInfo();
     List<ErrorInfo> infoList = new ArrayList<>();
     infoList.add(info);
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryToTableIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryToTableIT.java
index 34bf8a0..4f4354f 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryToTableIT.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigquery/BigQueryToTableIT.java
@@ -185,9 +185,8 @@ public class BigQueryToTableIT {
             "abc=,2000-01-01,00:00:00",
             "dec=,3000-12-31,23:59:59.990000",
             "xyw=,2011-01-01,23:59:59.999999");
-    QueryResponse response =
-        BQ_CLIENT.queryWithRetries(
-            String.format("SELECT bytes, date, time FROM [%s];", outputTable), project);
+    BQ_CLIENT.queryWithRetries(
+        String.format("SELECT bytes, date, time FROM [%s];", outputTable), project);
     List<TableRow> tableRows =
         getTableRowsFromQuery(
             String.format("SELECT bytes, date, time FROM [%s];", outputTable), MAX_RETRY);
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIOTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIOTest.java
index abe2749..eb7429f 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIOTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/bigtable/BigtableIOTest.java
@@ -1284,8 +1284,6 @@ public class BigtableIOTest {
   @Test
   public void testWritingEmitsResultsWhenDoneInFixedWindow() throws Exception {
     final String table = "table";
-    final String key = "key";
-    final String value = "value";
 
     service.createTable(table);
 
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1Test.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1Test.java
index 9c42a45..8371582 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1Test.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/datastore/DatastoreV1Test.java
@@ -138,9 +138,6 @@ public class DatastoreV1Test {
   public void setUp() {
     MockitoAnnotations.initMocks(this);
 
-    DatastoreV1.Read initialRead =
-        DatastoreIO.v1().read().withProjectId(PROJECT_ID).withQuery(QUERY).withNamespace(NAMESPACE);
-
     when(mockDatastoreFactory.getDatastore(any(PipelineOptions.class), any(String.class), any()))
         .thenReturn(mockDatastore);
     when(mockDatastoreFactory.getQuerySplitter()).thenReturn(mockQuerySplitter);
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/BaseFirestoreV1WriteFnTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/BaseFirestoreV1WriteFnTest.java
index 727fdc0..16c90af 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/BaseFirestoreV1WriteFnTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/firestore/BaseFirestoreV1WriteFnTest.java
@@ -410,7 +410,7 @@ public abstract class BaseFirestoreV1WriteFnTest<
                 LOG.debug("request = {}", request);
                 long requestDurationMs = 0;
                 BatchWriteResponse.Builder builder = BatchWriteResponse.newBuilder();
-                for (Write w : request.getWritesList()) {
+                for (Write ignored : request.getWritesList()) {
                   builder.addWriteResults(WriteResult.newBuilder().build());
                   if (clock.prev.isBefore(threshold)) {
                     requestDurationMs += defaultDocumentWriteLatency;
@@ -853,19 +853,16 @@ public abstract class BaseFirestoreV1WriteFnTest<
   private static class MyCounter implements Counter {
     private final MetricName named;
 
-    private long incInvocationCount;
     private final List<Long> incInvocations;
 
     public MyCounter(String namespace, String name) {
       named = MetricName.named(namespace, name);
-      incInvocationCount = 0;
       incInvocations = new ArrayList<>();
     }
 
     @Override
     public void inc() {
       LOG.trace("{} {}:inc()", METRIC_MARKER, named);
-      incInvocationCount++;
     }
 
     @Override
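
Aside: where a loop variable is required by the syntax but never read, the commit renames it to "ignored", which documents the intent (and which errorprone's UnusedVariable check is generally understood to exempt for names like this). A hypothetical sketch:

    import java.util.List;

    class WriteCounter {
      // Emits one count per element; the element itself is never used, so the
      // conventional name "ignored" says that only the iteration matters.
      static int countResults(List<String> writes) {
        int results = 0;
        for (String ignored : writes) {
          results++;
        }
        return results;
      }
    }
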
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIOLROIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIOLROIT.java
index 5ba4f7c..ddadf57 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIOLROIT.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIOLROIT.java
@@ -24,7 +24,6 @@ import com.google.api.services.healthcare.v1.model.DeidentifyConfig;
 import java.io.IOException;
 import java.security.SecureRandom;
 import org.apache.beam.sdk.testing.TestPipeline;
-import org.apache.beam.sdk.values.PCollection;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -92,8 +91,7 @@ public class FhirIOLROIT {
     String fhirStoreName = healthcareDataset + "/fhirStores/" + fhirStoreId;
     String exportGcsUriPrefix =
         "gs://" + DEFAULT_TEMP_BUCKET + "/export/" + new 
SecureRandom().nextInt(32);
-    PCollection<String> resources =
-        pipeline.apply(FhirIO.exportResourcesToGcs(fhirStoreName, 
exportGcsUriPrefix));
+    pipeline.apply(FhirIO.exportResourcesToGcs(fhirStoreName, 
exportGcsUriPrefix));
     pipeline.run();
   }
 
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIOTestUtil.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIOTestUtil.java
index 6f2a6ce..c5c2c9e 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIOTestUtil.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/healthcare/FhirIOTestUtil.java
@@ -17,7 +17,6 @@
  */
 package org.apache.beam.sdk.io.gcp.healthcare;
 
-import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.api.client.http.HttpHeaders;
 import com.google.api.client.http.HttpRequestInitializer;
 import com.google.api.client.http.javanet.NetHttpTransport;
@@ -46,11 +45,9 @@ import java.util.stream.Stream;
 import org.apache.beam.sdk.io.gcp.healthcare.HttpHealthcareApiClient.HealthcareHttpException;
 
 class FhirIOTestUtil {
-  private static final ObjectMapper mapper = new ObjectMapper();
   public static final String DEFAULT_TEMP_BUCKET = "temp-storage-for-healthcare-io-tests";
 
   private static Stream<String> readPrettyBundles(String version) {
-    ClassLoader classLoader = FhirIOTestUtil.class.getClassLoader();
     Path resourceDir = Paths.get("build", "resources", "test", version);
     String absolutePath = resourceDir.toFile().getAbsolutePath();
     File dir = new File(absolutePath);
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubReadIT.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubReadIT.java
index 66b4582..7b370eb 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubReadIT.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/pubsub/PubsubReadIT.java
@@ -30,13 +30,10 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /** Integration test for PubsubIO. */
 @RunWith(JUnit4.class)
 public class PubsubReadIT {
-  private static final Logger LOG = LoggerFactory.getLogger(PubsubReadIT.class);
 
   @Rule public transient TestPubsubSignal signal = TestPubsubSignal.create();
   @Rule public transient TestPipeline pipeline = TestPipeline.create();
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/ReadSpannerSchemaTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/ReadSpannerSchemaTest.java
index 306039a..33888cd 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/ReadSpannerSchemaTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/ReadSpannerSchemaTest.java
@@ -47,7 +47,6 @@ public class ReadSpannerSchemaTest {
   @Rule public final transient ExpectedException thrown = ExpectedException.none();
 
   private FakeServiceFactory serviceFactory;
-  private ReadOnlyTransaction mockTx;
 
   private static Struct columnMetadata(String tableName, String columnName, String type) {
     return Struct.newBuilder()
@@ -122,7 +121,6 @@ public class ReadSpannerSchemaTest {
   @SuppressWarnings("unchecked")
   public void setUp() throws Exception {
     serviceFactory = new FakeServiceFactory();
-    mockTx = mock(ReadOnlyTransaction.class);
   }
 
   @Test
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOReadTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOReadTest.java
index 3c886d1..8e1c833 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOReadTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOReadTest.java
@@ -288,14 +288,13 @@ public class SpannerIOReadTest implements Serializable {
 
     SpannerConfig spannerConfig = getSpannerConfig();
 
-    PCollection<Struct> one =
-        pipeline.apply(
-            "read q",
-            SpannerIO.read()
-                .withSpannerConfig(spannerConfig)
-                .withQuery("SELECT * FROM users")
-                .withQueryName("queryName")
-                .withTimestampBound(timestampBound));
+    pipeline.apply(
+        "read q",
+        SpannerIO.read()
+            .withSpannerConfig(spannerConfig)
+            .withQuery("SELECT * FROM users")
+            .withQueryName("queryName")
+            .withTimestampBound(timestampBound));
 
     FakeBatchTransactionId id = new FakeBatchTransactionId("runQueryTest");
     when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
@@ -336,14 +335,13 @@ public class SpannerIOReadTest implements Serializable {
 
     SpannerConfig spannerConfig = getSpannerConfig();
 
-    PCollection<Struct> one =
-        pipeline.apply(
-            "read q",
-            SpannerIO.read()
-                .withSpannerConfig(spannerConfig)
-                .withTable("users")
-                .withColumns("id", "name")
-                .withTimestampBound(timestampBound));
+    pipeline.apply(
+        "read q",
+        SpannerIO.read()
+            .withSpannerConfig(spannerConfig)
+            .withTable("users")
+            .withColumns("id", "name")
+            .withTimestampBound(timestampBound));
 
     FakeBatchTransactionId id = new FakeBatchTransactionId("runReadTest");
     when(mockBatchTx.getBatchTransactionId()).thenReturn(id);
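
Aside: in the two hunks above only the act of applying the transform matters to the test; apply() itself registers the step with the pipeline, so the returned PCollection need not be bound to a local. A hedged, minimal Beam sketch (step name and elements invented):

    import org.apache.beam.sdk.Pipeline;
    import org.apache.beam.sdk.options.PipelineOptionsFactory;
    import org.apache.beam.sdk.transforms.Create;

    class ApplyWithoutBinding {
      public static void main(String[] args) {
        Pipeline pipeline = Pipeline.create(PipelineOptionsFactory.create());
        // apply() adds the step to the pipeline graph; if the output is never
        // read back, assigning it to a local only trips UnusedVariable.
        pipeline.apply("create", Create.of("a", "b"));
        pipeline.run().waitUntilFinish();
      }
    }
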
diff --git a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOWriteTest.java b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOWriteTest.java
index aa275f9..ea7ca68 100644
--- a/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOWriteTest.java
+++ b/sdks/java/io/google-cloud-platform/src/test/java/org/apache/beam/sdk/io/gcp/spanner/SpannerIOWriteTest.java
@@ -995,7 +995,6 @@ public class SpannerIOWriteTest implements Serializable {
           g(range)
         };
 
-    long mutationSize = MutationSizeEstimator.sizeOf(m(1L));
     BatchableMutationFilterFn testFn = new BatchableMutationFilterFn(null, null, 1000, 1000, 3);
 
     BatchableMutationFilterFn.ProcessContext mockProcessContext =
diff --git a/sdks/java/io/hadoop-common/build.gradle b/sdks/java/io/hadoop-common/build.gradle
index 327e6f3..6c5cc45 100644
--- a/sdks/java/io/hadoop-common/build.gradle
+++ b/sdks/java/io/hadoop-common/build.gradle
@@ -19,7 +19,7 @@
 import java.util.stream.Collectors
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, automaticModuleName: 'org.apache.beam.sdk.io.hadoop.common')
+applyJavaNature( automaticModuleName: 'org.apache.beam.sdk.io.hadoop.common')
 
 description = "Apache Beam :: SDKs :: Java :: IO :: Hadoop Common"
 ext.summary = "Library to add shared Hadoop classes among Beam IOs."
diff --git a/sdks/java/io/hadoop-file-system/build.gradle b/sdks/java/io/hadoop-file-system/build.gradle
index 65aff2a..940dcb7 100644
--- a/sdks/java/io/hadoop-file-system/build.gradle
+++ b/sdks/java/io/hadoop-file-system/build.gradle
@@ -20,7 +20,6 @@ import java.util.stream.Collectors
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.hdfs')
 
 description = "Apache Beam :: SDKs :: Java :: IO :: Hadoop File System"
diff --git a/sdks/java/io/hadoop-format/build.gradle b/sdks/java/io/hadoop-format/build.gradle
index d5301ad..d9f2c98 100644
--- a/sdks/java/io/hadoop-format/build.gradle
+++ b/sdks/java/io/hadoop-format/build.gradle
@@ -21,7 +21,6 @@ import java.util.stream.Collectors
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.hadoop.format',
 )
 provideIntegrationTestingDependencies()
diff --git a/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOIT.java b/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOIT.java
index 822adb1..8973f58 100644
--- a/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOIT.java
+++ b/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOIT.java
@@ -102,8 +102,6 @@ public class HadoopFormatIOIT {
   private static Integer numberOfRows;
   private static String tableName;
   private static SerializableConfiguration hadoopConfiguration;
-  private static String bigQueryDataset;
-  private static String bigQueryTable;
   private static InfluxDBSettings settings;
   private static HadoopFormatIOITOptions options;
 
@@ -133,8 +131,6 @@ public class HadoopFormatIOIT {
     dataSource = DatabaseTestHelper.getPostgresDataSource(options);
     numberOfRows = options.getNumberOfRecords();
     tableName = DatabaseTestHelper.getTestTableName("HadoopFormatIOIT");
-    bigQueryDataset = options.getBigQueryDataset();
-    bigQueryTable = options.getBigQueryTable();
     if (!options.isWithTestcontainers()) {
       settings =
           InfluxDBSettings.builder()
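
Aside: several ITs in this commit cached option values (here the BigQuery dataset and table names) in static fields that nothing read afterwards; the fix is to stop caching and read from the options object wherever a value is actually needed. A hypothetical sketch of the surviving shape:

    class SetupExample {
      interface Options {
        Integer getNumberOfRecords();
        String getBigQueryDataset(); // still available to any caller that needs it
      }

      private static Integer numberOfRows;

      static void setUp(Options options) {
        // Only values with a later read are cached; the dataset/table strings
        // were stored but never used, so those assignments are gone.
        numberOfRows = options.getNumberOfRecords();
      }
    }
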
diff --git a/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOReadTest.java b/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOReadTest.java
index 8fa446f..bfad990 100644
--- a/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOReadTest.java
+++ b/sdks/java/io/hadoop-format/src/test/java/org/apache/beam/sdk/io/hadoop/format/HadoopFormatIOReadTest.java
@@ -48,7 +48,6 @@ import org.apache.beam.sdk.testing.TestPipeline;
 import org.apache.beam.sdk.transforms.SimpleFunction;
 import org.apache.beam.sdk.transforms.display.DisplayData;
 import org.apache.beam.sdk.values.KV;
-import org.apache.beam.sdk.values.PBegin;
 import org.apache.beam.sdk.values.PCollection;
 import org.apache.beam.sdk.values.Row;
 import org.apache.hadoop.conf.Configuration;
@@ -84,8 +83,6 @@ public class HadoopFormatIOReadTest {
   @Rule public final transient TestPipeline p = TestPipeline.create();
   @Rule public ExpectedException thrown = ExpectedException.none();
 
-  private PBegin input = PBegin.in(p);
-
   @BeforeClass
   public static void setUp() {
     serConf = loadTestConfiguration(EmployeeInputFormat.class, Text.class, Employee.class);
diff --git a/sdks/java/io/hbase/build.gradle b/sdks/java/io/hbase/build.gradle
index 2de5621..6dc9bb4 100644
--- a/sdks/java/io/hbase/build.gradle
+++ b/sdks/java/io/hbase/build.gradle
@@ -17,7 +17,7 @@
  */
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, automaticModuleName: 'org.apache.beam.sdk.io.hbase')
+applyJavaNature( automaticModuleName: 'org.apache.beam.sdk.io.hbase')
 provideIntegrationTestingDependencies()
 enableJavaPerformanceTesting()
 
diff --git a/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseIO.java b/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseIO.java
index af925f5..04e0add 100644
--- a/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseIO.java
+++ b/sdks/java/io/hbase/src/main/java/org/apache/beam/sdk/io/hbase/HBaseIO.java
@@ -716,7 +716,6 @@ public class HBaseIO {
       HBaseWriterFn(Write write) {
         checkNotNull(write.tableId, "tableId");
         checkNotNull(write.configuration, "configuration");
-        this.write = write;
       }
 
       @Setup
@@ -760,7 +759,6 @@ public class HBaseIO {
         builder.delegate(Write.this);
       }
 
-      private final Write write;
       private long recordsWritten;
 
       private transient Connection connection;
diff --git a/sdks/java/io/hcatalog/build.gradle b/sdks/java/io/hcatalog/build.gradle
index 1fc3a16..67eb3d5 100644
--- a/sdks/java/io/hcatalog/build.gradle
+++ b/sdks/java/io/hcatalog/build.gradle
@@ -19,7 +19,7 @@
 import java.util.stream.Collectors
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, automaticModuleName: 'org.apache.beam.sdk.io.hcatalog')
+applyJavaNature( automaticModuleName: 'org.apache.beam.sdk.io.hcatalog')
 
 description = "Apache Beam :: SDKs :: Java :: IO :: HCatalog"
 ext.summary = "IO to read and write for HCatalog source."
diff --git a/sdks/java/io/influxdb/build.gradle b/sdks/java/io/influxdb/build.gradle
index 1f84d02..706c51f 100644
--- a/sdks/java/io/influxdb/build.gradle
+++ b/sdks/java/io/influxdb/build.gradle
@@ -17,7 +17,7 @@
 */
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, automaticModuleName: 'org.apache.beam.sdk.io.influxdb')
+applyJavaNature( automaticModuleName: 'org.apache.beam.sdk.io.influxdb')
 provideIntegrationTestingDependencies()
 enableJavaPerformanceTesting()
 
@@ -29,7 +29,6 @@ dependencies {
   compile library.java.influxdb_library
   compile "com.squareup.okhttp3:okhttp:4.6.0"
   compile library.java.vendored_guava_26_0_jre
-  compile library.java.slf4j_api
   testCompile library.java.junit
   testCompile library.java.powermock
   testCompile library.java.powermock_mockito
diff --git a/sdks/java/io/influxdb/src/main/java/org/apache/beam/sdk/io/influxdb/InfluxDbIO.java b/sdks/java/io/influxdb/src/main/java/org/apache/beam/sdk/io/influxdb/InfluxDbIO.java
index 779f35e..b9cc3e4 100644
--- a/sdks/java/io/influxdb/src/main/java/org/apache/beam/sdk/io/influxdb/InfluxDbIO.java
+++ b/sdks/java/io/influxdb/src/main/java/org/apache/beam/sdk/io/influxdb/InfluxDbIO.java
@@ -56,8 +56,6 @@ import org.influxdb.dto.Query;
 import org.influxdb.dto.QueryResult;
 import org.influxdb.dto.QueryResult.Result;
 import org.influxdb.dto.QueryResult.Series;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * IO to read and write from InfluxDB.
@@ -103,7 +101,6 @@ import org.slf4j.LoggerFactory;
   "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
 })
 public class InfluxDbIO {
-  private static final Logger LOG = LoggerFactory.getLogger(InfluxDbIO.class);
 
   private static final String DEFAULT_RETENTION_POLICY = "autogen";
 
diff --git a/sdks/java/io/jdbc/build.gradle b/sdks/java/io/jdbc/build.gradle
index 18809bb..ca8616b 100644
--- a/sdks/java/io/jdbc/build.gradle
+++ b/sdks/java/io/jdbc/build.gradle
@@ -18,7 +18,6 @@
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.jdbc',
 )
 provideIntegrationTestingDependencies()
diff --git a/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOIT.java b/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOIT.java
index 8ea7086..8cebbbd 100644
--- a/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOIT.java
+++ b/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOIT.java
@@ -88,8 +88,6 @@ public class JdbcIOIT {
   private static int numberOfRows;
   private static PGSimpleDataSource dataSource;
   private static String tableName;
-  private static String bigQueryDataset;
-  private static String bigQueryTable;
   private static Long tableSize;
   private static InfluxDBSettings settings;
   @Rule public TestPipeline pipelineWrite = TestPipeline.create();
@@ -100,8 +98,6 @@ public class JdbcIOIT {
     PostgresIOTestPipelineOptions options =
         readIOTestPipelineOptions(PostgresIOTestPipelineOptions.class);
 
-    bigQueryDataset = options.getBigQueryDataset();
-    bigQueryTable = options.getBigQueryTable();
     numberOfRows = options.getNumberOfRecords();
     dataSource = DatabaseTestHelper.getPostgresDataSource(options);
     tableName = DatabaseTestHelper.getTestTableName("IT");
diff --git a/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOTest.java b/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOTest.java
index 9d1d75b..60d8cc7 100644
--- a/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOTest.java
+++ b/sdks/java/io/jdbc/src/test/java/org/apache/beam/sdk/io/jdbc/JdbcIOTest.java
@@ -94,13 +94,11 @@ import org.junit.Test;
 import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /** Test on the JdbcIO. */
 @RunWith(JUnit4.class)
 public class JdbcIOTest implements Serializable {
-  private static final Logger LOG = LoggerFactory.getLogger(JdbcIOTest.class);
+
   private static final DataSourceConfiguration DATA_SOURCE_CONFIGURATION =
       DataSourceConfiguration.create(
           "org.apache.derby.jdbc.EmbeddedDriver", 
"jdbc:derby:memory:testDB;create=true");
@@ -462,17 +460,16 @@ public class JdbcIOTest implements Serializable {
   public void testIfNumPartitionsIsZero() {
     thrown.expect(IllegalArgumentException.class);
     thrown.expectMessage("numPartitions can not be less than 1");
-    PCollection<TestRow> rows =
-        pipeline.apply(
-            JdbcIO.<TestRow>readWithPartitions()
-                .withDataSourceConfiguration(DATA_SOURCE_CONFIGURATION)
-                .withRowMapper(new JdbcTestHelper.CreateTestRowOfNameAndId())
-                .withCoder(SerializableCoder.of(TestRow.class))
-                .withTable(READ_TABLE_NAME)
-                .withNumPartitions(0)
-                .withPartitionColumn("id")
-                .withLowerBound(0L)
-                .withUpperBound(1000L));
+    pipeline.apply(
+        JdbcIO.<TestRow>readWithPartitions()
+            .withDataSourceConfiguration(DATA_SOURCE_CONFIGURATION)
+            .withRowMapper(new JdbcTestHelper.CreateTestRowOfNameAndId())
+            .withCoder(SerializableCoder.of(TestRow.class))
+            .withTable(READ_TABLE_NAME)
+            .withNumPartitions(0)
+            .withPartitionColumn("id")
+            .withLowerBound(0L)
+            .withUpperBound(1000L));
     pipeline.run();
   }
 
@@ -956,7 +953,6 @@ public class JdbcIOTest implements Serializable {
 
     ArrayList<Row> data = new ArrayList<>();
     for (int i = 0; i < rowsToAdd; i++) {
-      List<Object> fields = new ArrayList<>();
 
       Row row =
           schema.getFields().stream()
diff --git a/sdks/java/io/jms/build.gradle b/sdks/java/io/jms/build.gradle
index fee6cc3..3f9d0c7 100644
--- a/sdks/java/io/jms/build.gradle
+++ b/sdks/java/io/jms/build.gradle
@@ -18,7 +18,6 @@
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.jms',
 )
 
diff --git a/sdks/java/io/kafka/build.gradle b/sdks/java/io/kafka/build.gradle
index 8ee0fc5..7a4ca21 100644
--- a/sdks/java/io/kafka/build.gradle
+++ b/sdks/java/io/kafka/build.gradle
@@ -20,7 +20,6 @@ import java.util.stream.Collectors
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.kafka',
   mavenRepositories: [
     [id: 'io.confluent', url: 'https://packages.confluent.io/maven/']
diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaExactlyOnceSink.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaExactlyOnceSink.java
index d4589a2..75c8270 100644
--- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaExactlyOnceSink.java
+++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/KafkaExactlyOnceSink.java
@@ -90,7 +90,8 @@ import org.slf4j.LoggerFactory;
  */
 @SuppressWarnings({
   "rawtypes", // TODO(https://issues.apache.org/jira/browse/BEAM-10556)
-  "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
+  "nullness", // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
+  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is 
released (2.11.0)
 })
 class KafkaExactlyOnceSink<K, V>
     extends PTransform<PCollection<ProducerRecord<K, V>>, PCollection<Void>> {
@@ -253,6 +254,7 @@ class KafkaExactlyOnceSink<K, V>
     // started with same groupId used for storing state on Kafka side, including the case where
     // a job is restarted with same groupId, but the metadata from previous run was not cleared.
     // Better to be safe and error out with a clear message.
+
     @StateId(WRITER_ID)
     private final StateSpec<ValueState<String>> writerIdSpec = StateSpecs.value();
 
diff --git a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/WatchKafkaTopicPartitionDoFn.java b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/WatchKafkaTopicPartitionDoFn.java
index 46cd823..c4e6272 100644
--- a/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/WatchKafkaTopicPartitionDoFn.java
+++ b/sdks/java/io/kafka/src/main/java/org/apache/beam/sdk/io/kafka/WatchKafkaTopicPartitionDoFn.java
@@ -49,7 +49,10 @@ import org.joda.time.Instant;
  * https://docs.google.com/document/d/1FU3GxVRetHPLVizP3Mdv6mP5tpjZ3fd99qNjUI5DT5k/edit# for more
  * details.
  */
-@SuppressWarnings({"nullness"})
+@SuppressWarnings({
+  "nullness",
+  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is 
released (2.11.0)
+})
 @Experimental
 class WatchKafkaTopicPartitionDoFn extends DoFn<KV<byte[], byte[]>, KafkaSourceDescriptor> {
 
diff --git a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOExternalTest.java b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOExternalTest.java
index c699885..89f64b1 100644
--- a/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOExternalTest.java
+++ b/sdks/java/io/kafka/src/test/java/org/apache/beam/sdk/io/kafka/KafkaIOExternalTest.java
@@ -255,8 +255,7 @@ public class KafkaIOExternalTest {
 
     RunnerApi.PTransform kafkaReadComposite =
         result.getComponents().getTransformsOrThrow(transform.getSubtransforms(0));
-    RunnerApi.PTransform kafkaComposite =
-        result.getComponents().getTransformsOrThrow(kafkaReadComposite.getSubtransforms(0));
+    result.getComponents().getTransformsOrThrow(kafkaReadComposite.getSubtransforms(0));
 
     verifyKafkaReadComposite(
         result.getComponents().getTransformsOrThrow(kafkaReadComposite.getSubtransforms(0)),
diff --git a/sdks/java/io/kinesis/build.gradle b/sdks/java/io/kinesis/build.gradle
index 0085df6..cc5334f 100644
--- a/sdks/java/io/kinesis/build.gradle
+++ b/sdks/java/io/kinesis/build.gradle
@@ -17,7 +17,7 @@
  */
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, automaticModuleName: 'org.apache.beam.sdk.io.kinesis')
+applyJavaNature( automaticModuleName: 'org.apache.beam.sdk.io.kinesis')
 provideIntegrationTestingDependencies()
 enableJavaPerformanceTesting()
 
diff --git a/sdks/java/io/kinesis/expansion-service/build.gradle b/sdks/java/io/kinesis/expansion-service/build.gradle
index 5974901..c186e63 100644
--- a/sdks/java/io/kinesis/expansion-service/build.gradle
+++ b/sdks/java/io/kinesis/expansion-service/build.gradle
@@ -21,7 +21,6 @@ apply plugin: 'application'
 mainClassName = "org.apache.beam.sdk.expansion.service.ExpansionService"
 
 applyJavaNature(
-  suppressUnusedVariable: true,
     automaticModuleName: 'org.apache.beam.sdk.io.kinesis.expansion.service',
     exportJavadoc: false,
     validateShadowJar: false,
diff --git a/sdks/java/io/kinesis/src/test/java/org/apache/beam/sdk/io/kinesis/KinesisIOReadTest.java b/sdks/java/io/kinesis/src/test/java/org/apache/beam/sdk/io/kinesis/KinesisIOReadTest.java
index 1f7c493..fdacc62 100644
--- a/sdks/java/io/kinesis/src/test/java/org/apache/beam/sdk/io/kinesis/KinesisIOReadTest.java
+++ b/sdks/java/io/kinesis/src/test/java/org/apache/beam/sdk/io/kinesis/KinesisIOReadTest.java
@@ -68,7 +68,6 @@ public class KinesisIOReadTest {
   public void testReadWithBasicCredentialsAndCustomEndpoint() {
     String customEndpoint = "localhost:9999";
     Regions region = Regions.US_WEST_1;
-    BasicAWSCredentials credentials = new BasicAWSCredentials(ACCESS_KEY_ID, SECRET_ACCESS_KEY);
 
     Read<KinesisRecord> read =
         KinesisIO.read()
@@ -102,7 +101,6 @@ public class KinesisIOReadTest {
   public void testReadWithBasicCredentialsAndVerificationDisabled() {
     String customEndpoint = "localhost:9999";
     Regions region = Regions.US_WEST_1;
-    BasicAWSCredentials credentials = new BasicAWSCredentials(ACCESS_KEY_ID, SECRET_ACCESS_KEY);
 
     Read<KinesisRecord> read =
         KinesisIO.read()
diff --git a/sdks/java/io/kinesis/src/test/java/org/apache/beam/sdk/io/kinesis/serde/AwsModuleTest.java b/sdks/java/io/kinesis/src/test/java/org/apache/beam/sdk/io/kinesis/serde/AwsModuleTest.java
index 8ceda61..e58825e 100644
--- a/sdks/java/io/kinesis/src/test/java/org/apache/beam/sdk/io/kinesis/serde/AwsModuleTest.java
+++ b/sdks/java/io/kinesis/src/test/java/org/apache/beam/sdk/io/kinesis/serde/AwsModuleTest.java
@@ -99,8 +99,6 @@ public class AwsModuleTest {
     credentialsProvider =
         new AWSStaticCredentialsProvider(
             new BasicSessionCredentials(ACCESS_KEY_ID, SECRET_ACCESS_KEY, SESSION_TOKEN));
-    serializedCredentialsProvider = serialize(credentialsProvider);
-    deserializedCredentialsProvider = deserializeCredentialsProvider(serializedCredentialsProvider);
 
     checkStaticSessionCredentials(credentialsProvider);
   }
diff --git a/sdks/java/io/kudu/build.gradle b/sdks/java/io/kudu/build.gradle
index 6509211..7f28d5e 100644
--- a/sdks/java/io/kudu/build.gradle
+++ b/sdks/java/io/kudu/build.gradle
@@ -18,7 +18,6 @@
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
   classesTriggerCheckerBugs: [
     'KuduTestUtils': 'TODO: file a bug report',
     'KuduIOIT': 'TODO: file a bug report',
diff --git a/sdks/java/io/kudu/src/main/java/org/apache/beam/sdk/io/kudu/KuduIO.java b/sdks/java/io/kudu/src/main/java/org/apache/beam/sdk/io/kudu/KuduIO.java
index 6ba383c..a8d794e 100644
--- a/sdks/java/io/kudu/src/main/java/org/apache/beam/sdk/io/kudu/KuduIO.java
+++ b/sdks/java/io/kudu/src/main/java/org/apache/beam/sdk/io/kudu/KuduIO.java
@@ -50,8 +50,6 @@ import org.apache.kudu.client.KuduPredicate;
 import org.apache.kudu.client.Operation;
 import org.apache.kudu.client.RowResult;
 import org.checkerframework.checker.nullness.qual.Nullable;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * A bounded source and sink for Kudu.
@@ -126,7 +124,6 @@ import org.slf4j.LoggerFactory;
   "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
 })
 public class KuduIO {
-  private static final Logger LOG = LoggerFactory.getLogger(KuduIO.class);
 
   private KuduIO() {}
 
diff --git a/sdks/java/io/kudu/src/test/java/org/apache/beam/sdk/io/kudu/KuduIOIT.java b/sdks/java/io/kudu/src/test/java/org/apache/beam/sdk/io/kudu/KuduIOIT.java
index a50209d..1add8f2 100644
--- a/sdks/java/io/kudu/src/test/java/org/apache/beam/sdk/io/kudu/KuduIOIT.java
+++ b/sdks/java/io/kudu/src/test/java/org/apache/beam/sdk/io/kudu/KuduIOIT.java
@@ -52,8 +52,6 @@ import org.junit.Test;
 import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /**
  * A test of {@link org.apache.beam.sdk.io.kudu.KuduIO} on an independent Kudu instance.
@@ -90,7 +88,6 @@ import org.slf4j.LoggerFactory;
  */
 @RunWith(JUnit4.class)
 public class KuduIOIT {
-  private static final Logger LOG = LoggerFactory.getLogger(KuduIOIT.class);
 
   /** KuduIOIT options. */
   public interface KuduPipelineOptions extends IOTestPipelineOptions {
diff --git a/sdks/java/io/kudu/src/test/java/org/apache/beam/sdk/io/kudu/KuduIOTest.java b/sdks/java/io/kudu/src/test/java/org/apache/beam/sdk/io/kudu/KuduIOTest.java
index 86d60e0..66f908c 100644
--- a/sdks/java/io/kudu/src/test/java/org/apache/beam/sdk/io/kudu/KuduIOTest.java
+++ b/sdks/java/io/kudu/src/test/java/org/apache/beam/sdk/io/kudu/KuduIOTest.java
@@ -66,7 +66,6 @@ import org.slf4j.LoggerFactory;
  */
 @RunWith(JUnit4.class)
 public class KuduIOTest {
-  private static final Logger LOG = LoggerFactory.getLogger(KuduIOTest.class);
 
   @Rule public final TestPipeline writePipeline = TestPipeline.create();
   @Rule public final TestPipeline readPipeline = TestPipeline.create();
diff --git a/sdks/java/io/mongodb/build.gradle b/sdks/java/io/mongodb/build.gradle
index e8aa0e3..1090a2c 100644
--- a/sdks/java/io/mongodb/build.gradle
+++ b/sdks/java/io/mongodb/build.gradle
@@ -17,7 +17,7 @@
  */
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, automaticModuleName: 'org.apache.beam.sdk.io.mongodb')
+applyJavaNature( automaticModuleName: 'org.apache.beam.sdk.io.mongodb')
 provideIntegrationTestingDependencies()
 enableJavaPerformanceTesting()
 
diff --git a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java
index 8c7f03b..c70bfae 100644
--- a/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java
+++ b/sdks/java/io/mongodb/src/main/java/org/apache/beam/sdk/io/mongodb/MongoDbIO.java
@@ -628,7 +628,6 @@ public class MongoDbIO {
       ObjectId lowestBound = null; // lower boundary (previous split in the iteration)
       for (int i = 0; i < splitKeys.size(); i++) {
         ObjectId splitKey = splitKeys.get(i).getObjectId("_id");
-        String rangeFilter;
         if (i == 0) {
           aggregates.add(Aggregates.match(Filters.lte("_id", splitKey)));
           if (splitKeys.size() == 1) {
diff --git a/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDBIOIT.java b/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDBIOIT.java
index cd83275..6b696a7 100644
--- a/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDBIOIT.java
+++ b/sdks/java/io/mongodb/src/test/java/org/apache/beam/sdk/io/mongodb/MongoDBIOIT.java
@@ -81,8 +81,6 @@ import org.junit.runners.JUnit4;
 public class MongoDBIOIT {
 
   private static final String NAMESPACE = MongoDBIOIT.class.getName();
-  private static String bigQueryDataset;
-  private static String bigQueryTable;
   private static String mongoUrl;
   private static MongoClient mongoClient;
   private static InfluxDBSettings settings;
@@ -128,8 +126,6 @@ public class MongoDBIOIT {
     PipelineOptionsFactory.register(MongoDBPipelineOptions.class);
     options = TestPipeline.testingPipelineOptions().as(MongoDBPipelineOptions.class);
     collection = String.format("test_%s", new Date().getTime());
-    bigQueryDataset = options.getBigQueryDataset();
-    bigQueryTable = options.getBigQueryTable();
     mongoUrl =
         String.format("mongodb://%s:%s", options.getMongoDBHostName(), 
options.getMongoDBPort());
     mongoClient = MongoClients.create(mongoUrl);
diff --git a/sdks/java/io/mqtt/build.gradle b/sdks/java/io/mqtt/build.gradle
index 0468bd2..f6e9925 100644
--- a/sdks/java/io/mqtt/build.gradle
+++ b/sdks/java/io/mqtt/build.gradle
@@ -17,7 +17,7 @@
  */
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, automaticModuleName: 'org.apache.beam.sdk.io.mqtt')
+applyJavaNature( automaticModuleName: 'org.apache.beam.sdk.io.mqtt')
 
 description = "Apache Beam :: SDKs :: Java :: IO :: MQTT"
 ext.summary = "IO to read and write to a MQTT broker."
diff --git a/sdks/java/io/parquet/build.gradle b/sdks/java/io/parquet/build.gradle
index 92582e5..8e9f679 100644
--- a/sdks/java/io/parquet/build.gradle
+++ b/sdks/java/io/parquet/build.gradle
@@ -20,7 +20,6 @@ import java.util.stream.Collectors
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.parquet',
 )
 
diff --git a/sdks/java/io/parquet/src/test/java/org/apache/beam/sdk/io/parquet/ParquetIOTest.java b/sdks/java/io/parquet/src/test/java/org/apache/beam/sdk/io/parquet/ParquetIOTest.java
index 332e896..d03f6a3 100644
--- a/sdks/java/io/parquet/src/test/java/org/apache/beam/sdk/io/parquet/ParquetIOTest.java
+++ b/sdks/java/io/parquet/src/test/java/org/apache/beam/sdk/io/parquet/ParquetIOTest.java
@@ -386,7 +386,6 @@ public class ParquetIOTest implements Serializable {
     ArrayList<GenericRecord> data = new ArrayList<>();
     GenericRecordBuilder builder = new GenericRecordBuilder(REQUESTED_ENCODER_SCHEMA);
     for (int i = 0; i < count; i++) {
-      int index = i % SCIENTISTS.length;
       GenericRecord record = builder.set("id", Integer.toString(i)).set("name", null).build();
       data.add(record);
     }
diff --git a/sdks/java/io/rabbitmq/build.gradle b/sdks/java/io/rabbitmq/build.gradle
index 08c5a79..0f9a9ac 100644
--- a/sdks/java/io/rabbitmq/build.gradle
+++ b/sdks/java/io/rabbitmq/build.gradle
@@ -17,7 +17,7 @@
  */
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, automaticModuleName: 'org.apache.beam.sdk.io.rabbitmq')
+applyJavaNature( automaticModuleName: 'org.apache.beam.sdk.io.rabbitmq')
 
 description = "Apache Beam :: SDKs :: Java :: IO :: RabbitMQ"
 ext.summary = "IO to read and write to a RabbitMQ broker."
diff --git a/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/RabbitMqIOTest.java b/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/RabbitMqIOTest.java
index c7b3c0e..2ceae33 100644
--- a/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/RabbitMqIOTest.java
+++ b/sdks/java/io/rabbitmq/src/test/java/org/apache/beam/sdk/io/rabbitmq/RabbitMqIOTest.java
@@ -63,13 +63,10 @@ import org.junit.rules.TemporaryFolder;
 import org.junit.rules.Timeout;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /** Test of {@link RabbitMqIO}. */
 @RunWith(JUnit4.class)
 public class RabbitMqIOTest implements Serializable {
-  private static final Logger LOG = LoggerFactory.getLogger(RabbitMqIOTest.class);
 
   private static int port;
   private static String defaultPort;
diff --git a/sdks/java/io/redis/build.gradle b/sdks/java/io/redis/build.gradle
index f7092fb..0f668e8 100644
--- a/sdks/java/io/redis/build.gradle
+++ b/sdks/java/io/redis/build.gradle
@@ -17,7 +17,7 @@
  */
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, automaticModuleName: 'org.apache.beam.sdk.io.redis')
+applyJavaNature( automaticModuleName: 'org.apache.beam.sdk.io.redis')
 
 description = "Apache Beam :: SDKs :: Java :: IO :: Redis"
 ext.summary ="IO to read and write on a Redis keystore."
diff --git a/sdks/java/io/snowflake/build.gradle b/sdks/java/io/snowflake/build.gradle
index 3fe128e..c3c5d06 100644
--- a/sdks/java/io/snowflake/build.gradle
+++ b/sdks/java/io/snowflake/build.gradle
@@ -17,7 +17,6 @@
  */
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.snowflake',
 )
 provideIntegrationTestingDependencies()
diff --git a/sdks/java/io/snowflake/expansion-service/build.gradle b/sdks/java/io/snowflake/expansion-service/build.gradle
index 23a332e..247a622 100644
--- a/sdks/java/io/snowflake/expansion-service/build.gradle
+++ b/sdks/java/io/snowflake/expansion-service/build.gradle
@@ -21,7 +21,6 @@ apply plugin: 'application'
 mainClassName = "org.apache.beam.sdk.expansion.service.ExpansionService"
 
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.expansion.service',
   exportJavadoc: false,
   validateShadowJar: false,
diff --git a/sdks/java/io/snowflake/src/main/java/org/apache/beam/sdk/io/snowflake/SnowflakeIO.java b/sdks/java/io/snowflake/src/main/java/org/apache/beam/sdk/io/snowflake/SnowflakeIO.java
index 05064bf..1071997 100644
--- a/sdks/java/io/snowflake/src/main/java/org/apache/beam/sdk/io/snowflake/SnowflakeIO.java
+++ b/sdks/java/io/snowflake/src/main/java/org/apache/beam/sdk/io/snowflake/SnowflakeIO.java
@@ -1284,7 +1284,6 @@ public class SnowflakeIO {
     private final SnowflakeService snowflakeService;
     private transient SimpleIngestManager ingestManager;
 
-    private transient DataSource dataSource;
     ArrayList<String> trackedFilesNames;
 
     StreamToTableFn(
@@ -1303,7 +1302,7 @@ public class SnowflakeIO {
 
     @Setup
     public void setup() throws Exception {
-      dataSource = dataSourceProviderFn.apply(null);
+      dataSourceProviderFn.apply(null);
 
       DataSourceProviderFromDataSourceConfiguration dataSourceProviderFromDataSourceConfiguration =
           (DataSourceProviderFromDataSourceConfiguration) this.dataSourceProviderFn;
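
Aside: note that the SnowflakeIO hunk above keeps the dataSourceProviderFn.apply(null) call while dropping the dataSource field: the call is presumably retained for its side effects (materializing the underlying data source), so only the never-read binding goes. A hypothetical sketch of that shape:

    import java.util.function.Function;

    class SideEffectCall {
      private final Function<Void, Object> provider;

      SideEffectCall(Function<Void, Object> provider) {
        this.provider = provider;
      }

      void setup() {
        // Call kept for its side effects; the result is deliberately not
        // stored, since a never-read field would re-trip UnusedVariable.
        provider.apply(null);
      }
    }
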
diff --git a/sdks/java/io/snowflake/src/main/java/org/apache/beam/sdk/io/snowflake/services/SnowflakeBatchServiceImpl.java b/sdks/java/io/snowflake/src/main/java/org/apache/beam/sdk/io/snowflake/services/SnowflakeBatchServiceImpl.java
index 2927829..2700d8b 100644
--- a/sdks/java/io/snowflake/src/main/java/org/apache/beam/sdk/io/snowflake/services/SnowflakeBatchServiceImpl.java
+++ b/sdks/java/io/snowflake/src/main/java/org/apache/beam/sdk/io/snowflake/services/SnowflakeBatchServiceImpl.java
@@ -33,8 +33,6 @@ import org.apache.beam.sdk.io.snowflake.data.SnowflakeTableSchema;
 import org.apache.beam.sdk.io.snowflake.enums.CreateDisposition;
 import org.apache.beam.sdk.io.snowflake.enums.WriteDisposition;
 import org.apache.beam.sdk.transforms.SerializableFunction;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /** Implemenation of {@link SnowflakeService} used in production. */
 @SuppressWarnings({
@@ -42,7 +40,6 @@ import org.slf4j.LoggerFactory;
   "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
 })
 public class SnowflakeBatchServiceImpl implements SnowflakeService<SnowflakeBatchServiceConfig> {
-  private static final Logger LOG = LoggerFactory.getLogger(SnowflakeBatchServiceImpl.class);
   private static final String SNOWFLAKE_GCS_PREFIX = "gcs://";
   private static final String GCS_PREFIX = "gs://";
 
diff --git a/sdks/java/io/snowflake/src/main/java/org/apache/beam/sdk/io/snowflake/services/SnowflakeStreamingServiceImpl.java b/sdks/java/io/snowflake/src/main/java/org/apache/beam/sdk/io/snowflake/services/SnowflakeStreamingServiceImpl.java
index da0c328..2634a17 100644
--- a/sdks/java/io/snowflake/src/main/java/org/apache/beam/sdk/io/snowflake/services/SnowflakeStreamingServiceImpl.java
+++ b/sdks/java/io/snowflake/src/main/java/org/apache/beam/sdk/io/snowflake/services/SnowflakeStreamingServiceImpl.java
@@ -24,8 +24,6 @@ import java.util.Set;
 import java.util.stream.Collectors;
 import net.snowflake.ingest.SimpleIngestManager;
 import net.snowflake.ingest.connection.IngestResponseException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 /** Implemenation of {@link SnowflakeService} used in production. */
 @SuppressWarnings({
@@ -33,7 +31,7 @@ import org.slf4j.LoggerFactory;
 })
 public class SnowflakeStreamingServiceImpl
     implements SnowflakeService<SnowflakeStreamingServiceConfig> {
-  private static final Logger LOG = LoggerFactory.getLogger(SnowflakeStreamingServiceImpl.class);
+
   private transient SimpleIngestManager ingestManager;
 
   /** Writing data to Snowflake in streaming mode. */
diff --git a/sdks/java/io/snowflake/src/test/java/org/apache/beam/sdk/io/snowflake/test/unit/DataSourceConfigurationTest.java b/sdks/java/io/snowflake/src/test/java/org/apache/beam/sdk/io/snowflake/test/unit/DataSourceConfigurationTest.java
index 6bac967..3170206 100644
--- a/sdks/java/io/snowflake/src/test/java/org/apache/beam/sdk/io/snowflake/test/unit/DataSourceConfigurationTest.java
+++ b/sdks/java/io/snowflake/src/test/java/org/apache/beam/sdk/io/snowflake/test/unit/DataSourceConfigurationTest.java
@@ -194,8 +194,6 @@ public class DataSourceConfigurationTest {
             .withServerName(SERVER_NAME)
             .withKeyPairPathAuth(USERNAME, privateKeyPath, keyPassphrase);
 
-    DataSource dataSource = configuration.buildDatasource();
-
     assertEquals(USERNAME, configuration.getUsername().get());
     //  TODO  assertEquals(privateKeyPath, configuration.getPrivateKeyPath());
     assertEquals(keyPassphrase, configuration.getPrivateKeyPassphrase().get());
@@ -239,8 +237,6 @@ public class DataSourceConfigurationTest {
             .withServerName(SERVER_NAME)
             .withKeyPairRawAuth(USERNAME, rawPrivateKey, keyPassphrase);
 
-    DataSource dataSource = configuration.buildDatasource();
-
     assertEquals(USERNAME, configuration.getUsername().get());
     assertEquals(rawPrivateKey, configuration.getRawPrivateKey().get());
     assertEquals(keyPassphrase, configuration.getPrivateKeyPassphrase().get());
diff --git a/sdks/java/io/snowflake/src/test/java/org/apache/beam/sdk/io/snowflake/test/unit/write/SchemaDispositionTest.java b/sdks/java/io/snowflake/src/test/java/org/apache/beam/sdk/io/snowflake/test/unit/write/SchemaDispositionTest.java
index d150f44..89edc91 100644
--- a/sdks/java/io/snowflake/src/test/java/org/apache/beam/sdk/io/snowflake/test/unit/write/SchemaDispositionTest.java
+++ b/sdks/java/io/snowflake/src/test/java/org/apache/beam/sdk/io/snowflake/test/unit/write/SchemaDispositionTest.java
@@ -58,7 +58,6 @@ import org.junit.runners.JUnit4;
   "rawtypes", // TODO(https://issues.apache.org/jira/browse/BEAM-10556)
 })
 public class SchemaDispositionTest {
-  private static final String FAKE_TABLE = "FAKE_TABLE";
   private static final String BUCKET_NAME = "BUCKET/";
 
   @Rule public final transient TestPipeline pipeline = TestPipeline.create();
diff --git a/sdks/java/io/snowflake/src/test/java/org/apache/beam/sdk/io/snowflake/test/unit/write/StreamingWriteTest.java b/sdks/java/io/snowflake/src/test/java/org/apache/beam/sdk/io/snowflake/test/unit/write/StreamingWriteTest.java
index 0ee2928..7c68fc2 100644
--- a/sdks/java/io/snowflake/src/test/java/org/apache/beam/sdk/io/snowflake/test/unit/write/StreamingWriteTest.java
+++ b/sdks/java/io/snowflake/src/test/java/org/apache/beam/sdk/io/snowflake/test/unit/write/StreamingWriteTest.java
@@ -56,15 +56,13 @@ import org.junit.Test;
 import org.junit.rules.ExpectedException;
 import org.junit.runner.RunWith;
 import org.junit.runners.JUnit4;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 
 @RunWith(JUnit4.class)
 @SuppressWarnings({
   "rawtypes", // TODO(https://issues.apache.org/jira/browse/BEAM-10556)
 })
 public class StreamingWriteTest {
-  private static final Logger LOG = LoggerFactory.getLogger(StreamingWriteTest.class);
+
   private static final String FAKE_TABLE = "TEST_TABLE";
   private static final String STAGING_BUCKET_NAME = "BUCKET/";
   private static final String STORAGE_INTEGRATION_NAME = "STORAGE_INTEGRATION";
diff --git a/sdks/java/io/solr/build.gradle b/sdks/java/io/solr/build.gradle
index a09191f..efdfde2 100644
--- a/sdks/java/io/solr/build.gradle
+++ b/sdks/java/io/solr/build.gradle
@@ -18,7 +18,6 @@
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
   automaticModuleName: 'org.apache.beam.sdk.io.solr',
 )
 
diff --git a/sdks/java/io/splunk/build.gradle b/sdks/java/io/splunk/build.gradle
index 872793a..d8ea800 100644
--- a/sdks/java/io/splunk/build.gradle
+++ b/sdks/java/io/splunk/build.gradle
@@ -19,7 +19,7 @@
 plugins {
     id 'org.apache.beam.module'
 }
-applyJavaNature(suppressUnusedVariable: true, automaticModuleName: 'org.apache.beam.sdk.io.splunk')
+applyJavaNature( automaticModuleName: 'org.apache.beam.sdk.io.splunk')
 
 description = "Apache Beam :: SDKs :: Java :: IO :: Splunk"
 ext.summary = "IO to write events to Splunk Http Event Collector (HEC)"
diff --git a/sdks/java/io/splunk/src/main/java/org/apache/beam/sdk/io/splunk/SplunkEventWriter.java b/sdks/java/io/splunk/src/main/java/org/apache/beam/sdk/io/splunk/SplunkEventWriter.java
index 7e41f15..51ded28 100644
--- a/sdks/java/io/splunk/src/main/java/org/apache/beam/sdk/io/splunk/SplunkEventWriter.java
+++ b/sdks/java/io/splunk/src/main/java/org/apache/beam/sdk/io/splunk/SplunkEventWriter.java
@@ -55,7 +55,8 @@ import org.slf4j.LoggerFactory;
 /** A {@link DoFn} to write {@link SplunkEvent}s to Splunk's HEC endpoint. */
 @AutoValue
 @SuppressWarnings({
-  "nullness" // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
+  "nullness", // TODO(https://issues.apache.org/jira/browse/BEAM-10402)
+  "unused" // TODO(BEAM-13271): Remove when new version of errorprone is released (2.11.0)
 })
 abstract class SplunkEventWriter extends DoFn<KV<Integer, SplunkEvent>, SplunkWriteError> {
 
diff --git a/sdks/java/io/synthetic/build.gradle b/sdks/java/io/synthetic/build.gradle
index bb428e6..65d58ce 100644
--- a/sdks/java/io/synthetic/build.gradle
+++ b/sdks/java/io/synthetic/build.gradle
@@ -18,7 +18,6 @@
 
 plugins { id 'org.apache.beam.module' }
 applyJavaNature(
-  suppressUnusedVariable: true,
   exportJavadoc: false,
   automaticModuleName: 'org.apache.beam.sdk.io.synthetic',
 )
diff --git a/sdks/java/io/thrift/build.gradle b/sdks/java/io/thrift/build.gradle
index 8e6f8dc..e012c7e 100644
--- a/sdks/java/io/thrift/build.gradle
+++ b/sdks/java/io/thrift/build.gradle
@@ -21,7 +21,6 @@ plugins {
     // id "org.jruyi.thrift" version "0.4.1"
 }
 applyJavaNature(
-  suppressUnusedVariable: true,
         automaticModuleName: 'org.apache.beam.sdk.io.thrift',
         generatedClassPatterns: [
                 /^org\.apache\.beam\.sdk\.io\.thrift\.payloads.*/,
diff --git a/sdks/java/io/thrift/src/test/java/org/apache/beam/sdk/io/thrift/ThriftIOTest.java b/sdks/java/io/thrift/src/test/java/org/apache/beam/sdk/io/thrift/ThriftIOTest.java
index afdc9c3..0b01fb3 100644
--- a/sdks/java/io/thrift/src/test/java/org/apache/beam/sdk/io/thrift/ThriftIOTest.java
+++ b/sdks/java/io/thrift/src/test/java/org/apache/beam/sdk/io/thrift/ThriftIOTest.java
@@ -36,7 +36,6 @@ import org.apache.thrift.protocol.TBinaryProtocol;
 import org.apache.thrift.protocol.TCompactProtocol;
 import org.apache.thrift.protocol.TJSONProtocol;
 import org.apache.thrift.protocol.TProtocolFactory;
-import org.apache.thrift.protocol.TSimpleJSONProtocol;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -51,13 +50,12 @@ public class ThriftIOTest implements Serializable {
   private static final String RESOURCE_DIR = "ThriftIOTest/";
 
   private static final String THRIFT_DIR = Resources.getResource(RESOURCE_DIR).getPath();
-  private static final String ALL_THRIFT_STRING =
-      Resources.getResource(RESOURCE_DIR).getPath() + "*";
+
   private static final TestThriftStruct TEST_THRIFT_STRUCT = new TestThriftStruct();
   private static List<TestThriftStruct> testThriftStructs;
   private final TProtocolFactory tBinaryProtoFactory = new TBinaryProtocol.Factory();
   private final TProtocolFactory tJsonProtocolFactory = new TJSONProtocol.Factory();
-  private final TProtocolFactory tSimpleJsonProtocolFactory = new TSimpleJSONProtocol.Factory();
+
   private final TProtocolFactory tCompactProtocolFactory = new TCompactProtocol.Factory();
   @Rule public transient TestPipeline mainPipeline = TestPipeline.create();
   @Rule public transient TestPipeline readPipeline = TestPipeline.create();
diff --git a/sdks/java/io/tika/build.gradle b/sdks/java/io/tika/build.gradle
index 5b13e47..c6eab51 100644
--- a/sdks/java/io/tika/build.gradle
+++ b/sdks/java/io/tika/build.gradle
@@ -17,7 +17,7 @@
  */
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, automaticModuleName: 'org.apache.beam.sdk.io.tika')
+applyJavaNature( automaticModuleName: 'org.apache.beam.sdk.io.tika')
 
 description = "Apache Beam :: SDKs :: Java :: IO :: Tika"
 ext.summary = "Tika Input to parse files."
diff --git a/sdks/java/io/xml/build.gradle b/sdks/java/io/xml/build.gradle
index 6548793..22e5b76 100644
--- a/sdks/java/io/xml/build.gradle
+++ b/sdks/java/io/xml/build.gradle
@@ -17,7 +17,7 @@
  */
 
 plugins { id 'org.apache.beam.module' }
-applyJavaNature(suppressUnusedVariable: true, automaticModuleName: 'org.apache.beam.sdk.io.xml')
+applyJavaNature( automaticModuleName: 'org.apache.beam.sdk.io.xml')
 
 description = "Apache Beam :: SDKs :: Java :: IO :: XML"
 ext.summary = "IO to read and write XML files."
