This is an automated email from the ASF dual-hosted git repository.

schofielaj pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/kafka.git
The following commit(s) were added to refs/heads/trunk by this push:
     new 1fc25d83894 MINOR: remove arguments from AsyncKafkaConsumerTest.newConsumer() that are identical (#20426)
1fc25d83894 is described below

commit 1fc25d83894ff3a0d3f2e9d60fc14e38ee68877a
Author: Kirk True <k...@kirktrue.pro>
AuthorDate: Thu Aug 28 01:56:39 2025 -0700

    MINOR: remove arguments from AsyncKafkaConsumerTest.newConsumer() that are identical (#20426)

    Very minor cleanup of redundant arguments in `AsyncKafkaConsumerTest`.

    Reviewers: Andrew Schofield <aschofi...@confluent.io>
---
 .../consumer/internals/AsyncKafkaConsumerTest.java | 46 ++++++----------------
 1 file changed, 11 insertions(+), 35 deletions(-)

diff --git a/clients/src/test/java/org/apache/kafka/clients/consumer/internals/AsyncKafkaConsumerTest.java b/clients/src/test/java/org/apache/kafka/clients/consumer/internals/AsyncKafkaConsumerTest.java
index 16e4c9d4b3f..c03d1553ac5 100644
--- a/clients/src/test/java/org/apache/kafka/clients/consumer/internals/AsyncKafkaConsumerTest.java
+++ b/clients/src/test/java/org/apache/kafka/clients/consumer/internals/AsyncKafkaConsumerTest.java
@@ -250,16 +250,13 @@ public class AsyncKafkaConsumerTest {
         FetchBuffer fetchBuffer,
         ConsumerInterceptors<String, String> interceptors,
         ConsumerRebalanceListenerInvoker rebalanceListenerInvoker,
-        SubscriptionState subscriptions,
-        String groupId,
-        String clientId,
-        boolean autoCommitEnabled) {
+        SubscriptionState subscriptions) {
         long retryBackoffMs = 100L;
         int requestTimeoutMs = 30000;
         int defaultApiTimeoutMs = 1000;
         return new AsyncKafkaConsumer<>(
             new LogContext(),
-            clientId,
+            "client-id",
             new Deserializers<>(new StringDeserializer(), new StringDeserializer(), metrics),
             fetchBuffer,
             fetchCollector,
@@ -275,8 +272,8 @@ public class AsyncKafkaConsumerTest {
             retryBackoffMs,
             requestTimeoutMs,
             defaultApiTimeoutMs,
-            groupId,
-            autoCommitEnabled);
+            "group-id",
+            false);
     }
 
     @Test
@@ -707,10 +704,7 @@ public class AsyncKafkaConsumerTest {
             mock(FetchBuffer.class),
             mock(ConsumerInterceptors.class),
             mock(ConsumerRebalanceListenerInvoker.class),
-            subscriptions,
-            "group-id",
-            "client-id",
-            false));
+            subscriptions));
         consumer.close(CloseOptions.timeout(Duration.ofMillis(timeoutMs)));
         verify(applicationEventHandler).addAndGet(any(LeaveGroupOnCloseEvent.class));
     }
@@ -730,10 +724,7 @@ public class AsyncKafkaConsumerTest {
             mock(FetchBuffer.class),
             new ConsumerInterceptors<>(Collections.emptyList(), metrics),
             invoker,
-            subscriptions,
-            "group-id",
-            "client-id",
-            false));
+            subscriptions));
         consumer.setGroupAssignmentSnapshot(partitions);
 
         Throwable t = assertThrows(KafkaException.class, () -> consumer.close(CloseOptions.timeout(Duration.ZERO)));
@@ -754,10 +745,7 @@ public class AsyncKafkaConsumerTest {
             mock(FetchBuffer.class),
             mock(ConsumerInterceptors.class),
             mock(ConsumerRebalanceListenerInvoker.class),
-            subscriptions,
-            "group-id",
-            "client-id",
-            false));
+            subscriptions));
 
         Duration timeout = Duration.ofMillis(timeoutMs);
 
@@ -778,10 +766,7 @@ public class AsyncKafkaConsumerTest {
             mock(FetchBuffer.class),
             mock(ConsumerInterceptors.class),
             mock(ConsumerRebalanceListenerInvoker.class),
-            subscriptions,
-            "group-id",
-            "client-id",
-            false);
+            subscriptions);
         completeTopicSubscriptionChangeEventSuccessfully();
         consumer.subscribe(singleton("topic"), mock(ConsumerRebalanceListener.class));
         subscriptions.assignFromSubscribed(singleton(new TopicPartition("topic", 0)));
@@ -803,10 +788,7 @@ public class AsyncKafkaConsumerTest {
             mock(FetchBuffer.class),
             mock(ConsumerInterceptors.class),
             mock(ConsumerRebalanceListenerInvoker.class),
-            subscriptions,
-            "group-id",
-            "client-id",
-            false);
+            subscriptions);
         completeTopicSubscriptionChangeEventSuccessfully();
         consumer.subscribe(singleton("topic"), mock(ConsumerRebalanceListener.class));
         subscriptions.assignFromSubscribed(singleton(new TopicPartition("topic", 0)));
@@ -1651,10 +1633,7 @@ public class AsyncKafkaConsumerTest {
             mock(FetchBuffer.class),
             new ConsumerInterceptors<>(Collections.emptyList(), metrics),
             mock(ConsumerRebalanceListenerInvoker.class),
-            subscriptions,
-            "group-id",
-            "client-id",
-            false);
+            subscriptions);
         final TopicPartition tp = new TopicPartition("topic", 0);
         final List<ConsumerRecord<String, String>> records = singletonList(
             new ConsumerRecord<>("topic", 0, 2, "key1", "value1"));
@@ -2034,10 +2013,7 @@ public class AsyncKafkaConsumerTest {
             mock(FetchBuffer.class),
             mock(ConsumerInterceptors.class),
             mock(ConsumerRebalanceListenerInvoker.class),
-            mock(SubscriptionState.class),
-            "group-id",
-            "client-id",
-            false);
+            mock(SubscriptionState.class));
         Metrics metrics = consumer.metricsRegistry();
         AsyncConsumerMetrics kafkaConsumerMetrics = consumer.kafkaConsumerMetrics();
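
For context, a sketch of what a call site looks like after this cleanup (illustrative only; it assumes the helper is the newConsumer() overload named in the commit title, and the test-class and variable names are placeholders):

    // Only the four collaborators that actually vary between tests are
    // passed in; "client-id", "group-id", and autoCommitEnabled=false are
    // now hardcoded inside newConsumer() itself.
    AsyncKafkaConsumer<String, String> consumer = newConsumer(
        mock(FetchBuffer.class),
        mock(ConsumerInterceptors.class),
        mock(ConsumerRebalanceListenerInvoker.class),
        mock(SubscriptionState.class));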