Hello everyone,

I'm currently working with a single Kafka 3.9.1 instance running in Docker, and 
I've set up a single topic named "jupiter-events".

My Kafka producer successfully sends messages to the topic, and I've
confirmed that the broker receives them.
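
To make this concrete, here is a minimal sketch of what the producing side does. The class name and the key/value strings are placeholders I've filled in; the topic, serializers, idempotence setting, and bootstrap address match the ProducerConfig dump included below:

import java.util.Properties;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import org.apache.kafka.common.serialization.StringSerializer;

public class JupiterProducerSketch {
    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka-service.jupiter:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, "true");

        try (KafkaProducer<String, String> producer = new KafkaProducer<>(props)) {
            // send() is asynchronous; blocking on get() confirms the broker
            // acknowledged the record before the producer is closed
            RecordMetadata meta = producer
                    .send(new ProducerRecord<>("jupiter-events", "some-key", "some-value"))
                    .get();
            System.out.printf("acked: partition=%d offset=%d%n", meta.partition(), meta.offset());
        }
    }
}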

However, my Kafka consumer never receives any messages. According to its
logs, the consumer starts up correctly, subscribes to "jupiter-events", and
finds the cluster, but every call to poll() returns an empty batch.
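
The consuming side boils down to the following loop (again, the class name is a placeholder; the group, topic, offset reset, and deserializers match the ConsumerConfig dump below). records.count() is 0 on every iteration, no matter how long it runs:

import java.time.Duration;
import java.util.List;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class JupiterConsumerSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "kafka-service.jupiter:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "jupiter-workers");
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class.getName());

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(List.of("jupiter-events"));
            while (true) {
                // poll() returns within ~1s; the returned batch is always empty
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(1));
                System.out.printf("polled %d record(s)%n", records.count());
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("offset=%d value=%s%n", record.offset(), record.value());
                }
            }
        }
    }
}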

The full logs and client configurations for both the producer and the
consumer are included below for reference.

Could anyone kindly take a look and let me know if there’s something I might be 
missing?

Thank you in advance for your time and assistance!

Kind regards,
Jakob

Producer log:

2025-07-21T09:52:50.878+02:00  INFO 1 --- [nio-8080-exec-4] o.a.k.c.t.i.KafkaMetricsCollector        : initializing Kafka metrics collector
2025-07-21T09:52:50.909+02:00  INFO 1 --- [nio-8080-exec-4] o.a.k.clients.producer.KafkaProducer     : [Producer clientId=producer-1] Instantiated an idempotent producer.
2025-07-21T09:52:50.968+02:00  INFO 1 --- [nio-8080-exec-4] o.a.kafka.common.utils.AppInfoParser     : Kafka version: 3.9.1
2025-07-21T09:52:50.970+02:00  INFO 1 --- [nio-8080-exec-4] o.a.kafka.common.utils.AppInfoParser     : Kafka commitId: f745dfdcee2b9851
2025-07-21T09:52:50.970+02:00  INFO 1 --- [nio-8080-exec-4] o.a.kafka.common.utils.AppInfoParser     : Kafka startTimeMs: 1753084370967
2025-07-21T09:52:51.464+02:00  INFO 1 --- [ad | producer-1] org.apache.kafka.clients.Metadata        : [Producer clientId=producer-1] Cluster ID: 5L6g3nShT-eMCtK--X86sw
2025-07-21T09:52:51.572+02:00  INFO 1 --- [ad | producer-1] o.a.k.c.p.internals.TransactionManager   : [Producer clientId=producer-1] ProducerId set to 0 with epoch 0
2025-07-21T09:52:51.621+02:00  INFO 1 --- [nio-8080-exec-4] o.a.k.clients.producer.KafkaProducer     : [Producer clientId=producer-1] Closing the Kafka producer with timeoutMillis = 9223372036854775807 ms.
2025-07-21T09:52:51.633+02:00  INFO 1 --- [nio-8080-exec-4] o.apache.kafka.common.metrics.Metrics    : Metrics scheduler closed
2025-07-21T09:52:51.633+02:00  INFO 1 --- [nio-8080-exec-4] o.apache.kafka.common.metrics.Metrics    : Closing reporter org.apache.kafka.common.metrics.JmxReporter
2025-07-21T09:52:51.633+02:00  INFO 1 --- [nio-8080-exec-4] o.apache.kafka.common.metrics.Metrics    : Closing reporter org.apache.kafka.common.telemetry.internals.ClientTelemetryReporter
2025-07-21T09:52:51.633+02:00  INFO 1 --- [nio-8080-exec-4] o.apache.kafka.common.metrics.Metrics    : Metrics reporters closed
2025-07-21T09:52:51.634+02:00  INFO 1 --- [nio-8080-exec-4] o.a.kafka.common.utils.AppInfoParser     : App info kafka.producer for producer-1 unregistered

Consumer log:

[main] INFO org.apache.kafka.common.telemetry.internals.KafkaMetricsCollector - initializing Kafka metrics collector
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version: 3.9.1
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId: f745dfdcee2b9851
[main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka startTimeMs: 1753084121707
[main] INFO org.apache.kafka.clients.consumer.internals.ClassicKafkaConsumer - [Consumer clientId=consumer-jupiter-workers-1, groupId=jupiter-workers] Subscribed to topic(s): jupiter-events
[main] INFO org.apache.kafka.clients.Metadata - [Consumer clientId=consumer-jupiter-workers-1, groupId=jupiter-workers] Cluster ID: 5L6g3nShT-eMCtK--X86sw

ProducerConfig values:
        acks = -1
        auto.include.jmx.reporter = true
        batch.size = 16384
        bootstrap.servers = [kafka-service.jupiter:9092]
        buffer.memory = 33554432
        client.dns.lookup = use_all_dns_ips
        client.id = producer-1
        compression.gzip.level = -1
        compression.lz4.level = 9
        compression.type = none
        compression.zstd.level = 3
        connections.max.idle.ms = 540000
        delivery.timeout.ms = 120000
        enable.idempotence = true
        enable.metrics.push = true
        interceptor.classes = []
        key.serializer = class org.apache.kafka.common.serialization.StringSerializer
        linger.ms = 0
        max.block.ms = 60000
        max.in.flight.requests.per.connection = 5
        max.request.size = 1048576
        metadata.max.age.ms = 300000
        metadata.max.idle.ms = 300000
        metadata.recovery.strategy = none
        metric.reporters = []
        metrics.num.samples = 2
        metrics.recording.level = INFO
        metrics.sample.window.ms = 30000
        partitioner.adaptive.partitioning.enable = true
        partitioner.availability.timeout.ms = 0
        partitioner.class = null
        partitioner.ignore.keys = false
        receive.buffer.bytes = 32768
        reconnect.backoff.max.ms = 1000
        reconnect.backoff.ms = 50
        request.timeout.ms = 30000
        retries = 2147483647
        retry.backoff.max.ms = 1000
        retry.backoff.ms = 100
        sasl.client.callback.handler.class = null
        sasl.jaas.config = null
        sasl.kerberos.kinit.cmd = /usr/bin/kinit
        sasl.kerberos.min.time.before.relogin = 60000
        sasl.kerberos.service.name = null
        sasl.kerberos.ticket.renew.jitter = 0.05
        sasl.kerberos.ticket.renew.window.factor = 0.8
        sasl.login.callback.handler.class = null
        sasl.login.class = null
        sasl.login.connect.timeout.ms = null
        sasl.login.read.timeout.ms = null
        sasl.login.refresh.buffer.seconds = 300
        sasl.login.refresh.min.period.seconds = 60
        sasl.login.refresh.window.factor = 0.8
        sasl.login.refresh.window.jitter = 0.05
        sasl.login.retry.backoff.max.ms = 10000
        sasl.login.retry.backoff.ms = 100
        sasl.mechanism = GSSAPI
        sasl.oauthbearer.clock.skew.seconds = 30
        sasl.oauthbearer.expected.audience = null
        sasl.oauthbearer.expected.issuer = null
        sasl.oauthbearer.header.urlencode = false
        sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
        sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
        sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
        sasl.oauthbearer.jwks.endpoint.url = null
        sasl.oauthbearer.scope.claim.name = scope
        sasl.oauthbearer.sub.claim.name = sub
        sasl.oauthbearer.token.endpoint.url = null
        security.protocol = PLAINTEXT
        security.providers = null
        send.buffer.bytes = 131072
        socket.connection.setup.timeout.max.ms = 30000
        socket.connection.setup.timeout.ms = 10000
        ssl.cipher.suites = null
        ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
        ssl.endpoint.identification.algorithm = https
        ssl.engine.factory.class = null
        ssl.key.password = null
        ssl.keymanager.algorithm = SunX509
        ssl.keystore.certificate.chain = null
        ssl.keystore.key = null
        ssl.keystore.location = null
        ssl.keystore.password = null
        ssl.keystore.type = JKS
        ssl.protocol = TLSv1.3
        ssl.provider = null
        ssl.secure.random.implementation = null
        ssl.trustmanager.algorithm = PKIX
        ssl.truststore.certificates = null
        ssl.truststore.location = null
        ssl.truststore.password = null
        ssl.truststore.type = JKS
        transaction.timeout.ms = 60000
        transactional.id = null
        value.serializer = class org.apache.kafka.common.serialization.StringSerializer

ConsumerConfig values:
        allow.auto.create.topics = true
        auto.commit.interval.ms = 5000
        auto.include.jmx.reporter = true
        auto.offset.reset = earliest
        bootstrap.servers = [kafka-service.jupiter:9092]
        check.crcs = true
        client.dns.lookup = use_all_dns_ips
        client.id = consumer-jupiter-workers-1
        client.rack =
        connections.max.idle.ms = 540000
        default.api.timeout.ms = 60000
        enable.auto.commit = true
        enable.metrics.push = true
        exclude.internal.topics = true
        fetch.max.bytes = 52428800
        fetch.max.wait.ms = 500
        fetch.min.bytes = 1
        group.id = jupiter-workers
        group.instance.id = null
        group.protocol = classic
        group.remote.assignor = null
        heartbeat.interval.ms = 3000
        interceptor.classes = []
        internal.leave.group.on.close = true
        internal.throw.on.fetch.stable.offset.unsupported = false
        isolation.level = read_uncommitted
        key.deserializer = class org.apache.kafka.common.serialization.StringDeserializer
        max.partition.fetch.bytes = 1048576
        max.poll.interval.ms = 300000
        max.poll.records = 500
        metadata.max.age.ms = 300000
        metadata.recovery.strategy = none
        metric.reporters = []
        metrics.num.samples = 2
        metrics.recording.level = INFO
        metrics.sample.window.ms = 30000
        partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor, class org.apache.kafka.clients.consumer.CooperativeStickyAssignor]
        receive.buffer.bytes = 65536
        reconnect.backoff.max.ms = 1000
        reconnect.backoff.ms = 50
        request.timeout.ms = 30000
        retry.backoff.max.ms = 1000
        retry.backoff.ms = 100
        sasl.client.callback.handler.class = null
        sasl.jaas.config = null
        sasl.kerberos.kinit.cmd = /usr/bin/kinit
        sasl.kerberos.min.time.before.relogin = 60000
        sasl.kerberos.service.name = null
        sasl.kerberos.ticket.renew.jitter = 0.05
        sasl.kerberos.ticket.renew.window.factor = 0.8
        sasl.login.callback.handler.class = null
        sasl.login.class = null
        sasl.login.connect.timeout.ms = null
        sasl.login.read.timeout.ms = null
        sasl.login.refresh.buffer.seconds = 300
        sasl.login.refresh.min.period.seconds = 60
        sasl.login.refresh.window.factor = 0.8
        sasl.login.refresh.window.jitter = 0.05
        sasl.login.retry.backoff.max.ms = 10000
        sasl.login.retry.backoff.ms = 100
        sasl.mechanism = GSSAPI
        sasl.oauthbearer.clock.skew.seconds = 30
        sasl.oauthbearer.expected.audience = null
        sasl.oauthbearer.expected.issuer = null
        sasl.oauthbearer.header.urlencode = false
        sasl.oauthbearer.jwks.endpoint.refresh.ms = 3600000
        sasl.oauthbearer.jwks.endpoint.retry.backoff.max.ms = 10000
        sasl.oauthbearer.jwks.endpoint.retry.backoff.ms = 100
        sasl.oauthbearer.jwks.endpoint.url = null
        sasl.oauthbearer.scope.claim.name = scope
        sasl.oauthbearer.sub.claim.name = sub
        sasl.oauthbearer.token.endpoint.url = null
        security.protocol = PLAINTEXT
        security.providers = null
        send.buffer.bytes = 131072
        session.timeout.ms = 45000
        socket.connection.setup.timeout.max.ms = 30000
        socket.connection.setup.timeout.ms = 10000
        ssl.cipher.suites = null
        ssl.enabled.protocols = [TLSv1.2, TLSv1.3]
        ssl.endpoint.identification.algorithm = https
        ssl.engine.factory.class = null
        ssl.key.password = null
        ssl.keymanager.algorithm = SunX509
        ssl.keystore.certificate.chain = null
        ssl.keystore.key = null
        ssl.keystore.location = null
        ssl.keystore.password = null
        ssl.keystore.type = JKS
        ssl.protocol = TLSv1.3
        ssl.provider = null
        ssl.secure.random.implementation = null
        ssl.trustmanager.algorithm = PKIX
        ssl.truststore.certificates = null
        ssl.truststore.location = null
        ssl.truststore.password = null
        ssl.truststore.type = JKS
        value.deserializer = class org.apache.kafka.common.serialization.StringDeserializer
