See <https://ci-builds.apache.org/job/Kafka/job/kafka-trunk-jdk15/20/display/redirect?page=changes>
Changes:
[github] KAFKA-10395: relax output topic check in TTD to work with dynamic routing (#9174)
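(For readers unfamiliar with the change: "TTD" is Kafka Streams' TopologyTestDriver, and "dynamic routing" means choosing the output topic per record via a TopicNameExtractor. Below is a minimal sketch of the pattern the relaxed check is meant to support; the topic names, class name, and configuration values are illustrative only and are not taken from #9174.)

    import java.util.Properties;

    import org.apache.kafka.common.serialization.Serdes;
    import org.apache.kafka.common.serialization.StringDeserializer;
    import org.apache.kafka.common.serialization.StringSerializer;
    import org.apache.kafka.streams.StreamsBuilder;
    import org.apache.kafka.streams.StreamsConfig;
    import org.apache.kafka.streams.TestInputTopic;
    import org.apache.kafka.streams.TestOutputTopic;
    import org.apache.kafka.streams.Topology;
    import org.apache.kafka.streams.TopologyTestDriver;
    import org.apache.kafka.streams.kstream.Consumed;
    import org.apache.kafka.streams.kstream.Produced;

    // Hypothetical example class, not part of the Kafka code base.
    public class DynamicRoutingTtdSketch {

        public static void main(final String[] args) {
            final StreamsBuilder builder = new StreamsBuilder();

            // Dynamic routing: the output topic is derived from each record's key
            // at runtime via a TopicNameExtractor lambda, so it never appears as a
            // statically declared sink topic in the topology.
            builder.stream("input", Consumed.with(Serdes.String(), Serdes.String()))
                   .to((key, value, recordContext) -> "output-" + key,
                       Produced.with(Serdes.String(), Serdes.String()));

            final Topology topology = builder.build();

            final Properties props = new Properties();
            props.put(StreamsConfig.APPLICATION_ID_CONFIG, "dynamic-routing-sketch");
            props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "dummy:1234");

            try (final TopologyTestDriver driver = new TopologyTestDriver(topology, props)) {
                final TestInputTopic<String, String> input =
                    driver.createInputTopic("input", new StringSerializer(), new StringSerializer());
                // With the relaxed output topic check, the driver can hand back records
                // for a topic name that was only produced dynamically.
                final TestOutputTopic<String, String> output =
                    driver.createOutputTopic("output-a", new StringDeserializer(), new StringDeserializer());

                input.pipeInput("a", "hello");
                System.out.println(output.readKeyValue()); // expected: KeyValue(a, hello)
            }
        }
    }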
------------------------------------------
[...truncated 2.52 MB...]
org.apache.kafka.streams.integration.HighAvailabilityTaskAssignorIntegrationTest > shouldScaleOutWithWarmupTasksAndInMemoryStores STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldInheritRepartitionTopicPartitionNumberFromUpstreamTopicWhenNumberOfPartitionsIsNotSpecified[Optimization = all] PASSED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldPerformKeySelectOperationWhenRepartitionOperationIsUsedWithKeySelector[Optimization = all] STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldPerformKeySelectOperationWhenRepartitionOperationIsUsedWithKeySelector[Optimization = all] PASSED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldThrowAnExceptionWhenNumberOfPartitionsOfRepartitionOperationDoNotMatchSourceTopicWhenJoining[Optimization = all] STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldThrowAnExceptionWhenNumberOfPartitionsOfRepartitionOperationDoNotMatchSourceTopicWhenJoining[Optimization = all] PASSED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldPerformSelectKeyWithRepartitionOperation[Optimization = none] STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldPerformSelectKeyWithRepartitionOperation[Optimization = none] PASSED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldDoProperJoiningWhenNumberOfPartitionsAreValidWhenUsingRepartitionOperation[Optimization = none] STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldDoProperJoiningWhenNumberOfPartitionsAreValidWhenUsingRepartitionOperation[Optimization = none] PASSED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldCreateRepartitionTopicWithSpecifiedNumberOfPartitions[Optimization = none] STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldCreateRepartitionTopicWithSpecifiedNumberOfPartitions[Optimization = none] PASSED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldUseStreamPartitionerForRepartitionOperation[Optimization = none] STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldUseStreamPartitionerForRepartitionOperation[Optimization = none] PASSED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldCreateRepartitionTopicIfKeyChangingOperationWasNotPerformed[Optimization = none] STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldCreateRepartitionTopicIfKeyChangingOperationWasNotPerformed[Optimization = none] PASSED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldGoThroughRebalancingCorrectly[Optimization = none] STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldGoThroughRebalancingCorrectly[Optimization = none] PASSED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldDeductNumberOfPartitionsFromRepartitionOperation[Optimization = none] STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldDeductNumberOfPartitionsFromRepartitionOperation[Optimization = none] PASSED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldGenerateRepartitionTopicWhenNameIsNotSpecified[Optimization = none] STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldGenerateRepartitionTopicWhenNameIsNotSpecified[Optimization = none] PASSED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldCreateOnlyOneRepartitionTopicWhenRepartitionIsFollowedByGroupByKey[Optimization = none] STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldCreateOnlyOneRepartitionTopicWhenRepartitionIsFollowedByGroupByKey[Optimization = none] PASSED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldInheritRepartitionTopicPartitionNumberFromUpstreamTopicWhenNumberOfPartitionsIsNotSpecified[Optimization = none] STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldInheritRepartitionTopicPartitionNumberFromUpstreamTopicWhenNumberOfPartitionsIsNotSpecified[Optimization = none] PASSED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldPerformKeySelectOperationWhenRepartitionOperationIsUsedWithKeySelector[Optimization = none] STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldPerformKeySelectOperationWhenRepartitionOperationIsUsedWithKeySelector[Optimization = none] PASSED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldThrowAnExceptionWhenNumberOfPartitionsOfRepartitionOperationDoNotMatchSourceTopicWhenJoining[Optimization = none] STARTED
org.apache.kafka.streams.integration.KStreamRepartitionIntegrationTest > shouldThrowAnExceptionWhenNumberOfPartitionsOfRepartitionOperationDoNotMatchSourceTopicWhenJoining[Optimization = none] PASSED
org.apache.kafka.streams.integration.InternalTopicIntegrationTest > shouldCompactTopicsForKeyValueStoreChangelogs STARTED
org.apache.kafka.streams.integration.InternalTopicIntegrationTest > shouldCompactTopicsForKeyValueStoreChangelogs PASSED
org.apache.kafka.streams.integration.InternalTopicIntegrationTest > shouldCompactAndDeleteTopicsForWindowStoreChangelogs STARTED
org.apache.kafka.streams.integration.InternalTopicIntegrationTest > shouldCompactAndDeleteTopicsForWindowStoreChangelogs PASSED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldBeAbleToQueryMapValuesState STARTED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldBeAbleToQueryMapValuesState PASSED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldNotMakeStoreAvailableUntilAllStoresAvailable STARTED
org.apache.kafka.streams.integration.HighAvailabilityTaskAssignorIntegrationTest > shouldScaleOutWithWarmupTasksAndInMemoryStores PASSED
org.apache.kafka.streams.integration.ResetIntegrationWithSslTest > testReprocessingFromScratchAfterResetWithIntermediateInternalTopic STARTED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldNotMakeStoreAvailableUntilAllStoresAvailable PASSED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldAllowToQueryAfterThreadDied STARTED
org.apache.kafka.streams.integration.ResetIntegrationWithSslTest > testReprocessingFromScratchAfterResetWithIntermediateInternalTopic PASSED
org.apache.kafka.streams.integration.ResetIntegrationWithSslTest > testReprocessingFromScratchAfterResetWithIntermediateUserTopic STARTED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldAllowToQueryAfterThreadDied PASSED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldBeAbleToQueryStateWithZeroSizedCache STARTED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldBeAbleToQueryStateWithZeroSizedCache PASSED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldBeAbleToQueryDuringRebalance STARTED
org.apache.kafka.streams.integration.ResetIntegrationWithSslTest > testReprocessingFromScratchAfterResetWithIntermediateUserTopic PASSED
org.apache.kafka.streams.integration.ResetIntegrationWithSslTest > testReprocessingFromScratchAfterResetWithoutIntermediateUserTopic STARTED
org.apache.kafka.streams.integration.ResetIntegrationWithSslTest > testReprocessingFromScratchAfterResetWithoutIntermediateUserTopic PASSED
org.apache.kafka.streams.integration.KTableKTableForeignKeyInnerJoinMultiIntegrationTest > shouldInnerJoinMultiPartitionQueryable STARTED
org.apache.kafka.streams.integration.KTableKTableForeignKeyInnerJoinMultiIntegrationTest > shouldInnerJoinMultiPartitionQueryable PASSED
org.apache.kafka.streams.integration.GlobalThreadShutDownOrderTest > shouldFinishGlobalStoreOperationOnShutDown STARTED
org.apache.kafka.streams.integration.GlobalThreadShutDownOrderTest > shouldFinishGlobalStoreOperationOnShutDown PASSED
org.apache.kafka.streams.integration.StoreUpgradeIntegrationTest > shouldMigrateInMemoryKeyValueStoreToTimestampedKeyValueStoreUsingPapi STARTED
org.apache.kafka.streams.integration.StoreUpgradeIntegrationTest > shouldMigrateInMemoryKeyValueStoreToTimestampedKeyValueStoreUsingPapi PASSED
org.apache.kafka.streams.integration.StoreUpgradeIntegrationTest > shouldProxyWindowStoreToTimestampedWindowStoreUsingPapi STARTED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldBeAbleToQueryDuringRebalance PASSED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldBeAbleToQueryMapValuesAfterFilterState STARTED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldBeAbleToQueryMapValuesAfterFilterState PASSED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldBeAbleToQueryFilterState STARTED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldBeAbleToQueryFilterState PASSED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldBeAbleQueryStandbyStateDuringRebalance STARTED
org.apache.kafka.streams.integration.StoreUpgradeIntegrationTest > shouldProxyWindowStoreToTimestampedWindowStoreUsingPapi PASSED
org.apache.kafka.streams.integration.StoreUpgradeIntegrationTest > shouldMigratePersistentKeyValueStoreToTimestampedKeyValueStoreUsingPapi STARTED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldBeAbleQueryStandbyStateDuringRebalance PASSED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldBeAbleToQueryStateWithNonZeroSizedCache STARTED
org.apache.kafka.streams.integration.StoreUpgradeIntegrationTest > shouldMigratePersistentKeyValueStoreToTimestampedKeyValueStoreUsingPapi PASSED
org.apache.kafka.streams.integration.StoreUpgradeIntegrationTest > shouldMigratePersistentWindowStoreToTimestampedWindowStoreUsingPapi STARTED
org.apache.kafka.streams.integration.QueryableStateIntegrationTest > shouldBeAbleToQueryStateWithNonZeroSizedCache PASSED
org.apache.kafka.streams.integration.FineGrainedAutoResetIntegrationTest > shouldOnlyReadRecordsWhereEarliestSpecifiedWithNoCommittedOffsetsWithGlobalAutoOffsetResetLatest STARTED
org.apache.kafka.streams.integration.FineGrainedAutoResetIntegrationTest > shouldOnlyReadRecordsWhereEarliestSpecifiedWithNoCommittedOffsetsWithGlobalAutoOffsetResetLatest PASSED
org.apache.kafka.streams.integration.FineGrainedAutoResetIntegrationTest > shouldThrowExceptionOverlappingPattern STARTED
org.apache.kafka.streams.integration.FineGrainedAutoResetIntegrationTest > shouldThrowExceptionOverlappingPattern PASSED
org.apache.kafka.streams.integration.FineGrainedAutoResetIntegrationTest > shouldThrowExceptionOverlappingTopic STARTED
org.apache.kafka.streams.integration.FineGrainedAutoResetIntegrationTest > shouldThrowExceptionOverlappingTopic PASSED
org.apache.kafka.streams.integration.FineGrainedAutoResetIntegrationTest > shouldOnlyReadRecordsWhereEarliestSpecifiedWithInvalidCommittedOffsets STARTED
org.apache.kafka.streams.integration.StoreUpgradeIntegrationTest > shouldMigratePersistentWindowStoreToTimestampedWindowStoreUsingPapi PASSED
org.apache.kafka.streams.integration.StoreUpgradeIntegrationTest > shouldProxyKeyValueStoreToTimestampedKeyValueStoreUsingPapi STARTED
org.apache.kafka.streams.integration.FineGrainedAutoResetIntegrationTest > shouldOnlyReadRecordsWhereEarliestSpecifiedWithInvalidCommittedOffsets PASSED
org.apache.kafka.streams.integration.FineGrainedAutoResetIntegrationTest > shouldOnlyReadRecordsWhereEarliestSpecifiedWithNoCommittedOffsetsWithDefaultGlobalAutoOffsetResetEarliest STARTED
FATAL: command execution failed
java.io.IOException: Pipe closed after 0 cycles
at org.apache.sshd.common.channel.ChannelPipedInputStream.read(ChannelPipedInputStream.java:118)
at org.apache.sshd.common.channel.ChannelPipedInputStream.read(ChannelPipedInputStream.java:101)
at hudson.remoting.FlightRecorderInputStream.read(FlightRecorderInputStream.java:91)
at hudson.remoting.ChunkedInputStream.readHeader(ChunkedInputStream.java:73)
at hudson.remoting.ChunkedInputStream.readUntilBreak(ChunkedInputStream.java:103)
at hudson.remoting.ChunkedCommandTransport.readBlock(ChunkedCommandTransport.java:39)
at hudson.remoting.AbstractSynchronousByteArrayCommandTransport.read(AbstractSynchronousByteArrayCommandTransport.java:34)
at hudson.remoting.SynchronousCommandTransport$ReaderThread.run(SynchronousCommandTransport.java:63)
Caused: hudson.remoting.ChannelClosedException: Channel "hudson.remoting.Channel@27e2f54a:H44": Remote call on H44 failed. The channel is closing down or has closed down
at hudson.remoting.Channel.call(Channel.java:991)
at hudson.remoting.RemoteInvocationHandler.invoke(RemoteInvocationHandler.java:285)
at com.sun.proxy.$Proxy164.isAlive(Unknown Source)
at hudson.Launcher$RemoteLauncher$ProcImpl.isAlive(Launcher.java:1147)
at hudson.Launcher$RemoteLauncher$ProcImpl.join(Launcher.java:1139)
at hudson.tasks.CommandInterpreter.join(CommandInterpreter.java:155)
at hudson.tasks.CommandInterpreter.perform(CommandInterpreter.java:109)
at hudson.tasks.CommandInterpreter.perform(CommandInterpreter.java:66)
at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:741)
at hudson.model.Build$BuildExecution.build(Build.java:206)
at hudson.model.Build$BuildExecution.doRun(Build.java:163)
at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:504)
at hudson.model.Run.execute(Run.java:1880)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at hudson.model.ResourceController.execute(ResourceController.java:97)
at hudson.model.Executor.run(Executor.java:428)
FATAL: Unable to delete script file /tmp/jenkins4476604087010782367.sh
java.io.IOException: Pipe closed after 0 cycles
at org.apache.sshd.common.channel.ChannelPipedInputStream.read(ChannelPipedInputStream.java:118)
at org.apache.sshd.common.channel.ChannelPipedInputStream.read(ChannelPipedInputStream.java:101)
at hudson.remoting.FlightRecorderInputStream.read(FlightRecorderInputStream.java:91)
at hudson.remoting.ChunkedInputStream.readHeader(ChunkedInputStream.java:73)
at hudson.remoting.ChunkedInputStream.readUntilBreak(ChunkedInputStream.java:103)
at hudson.remoting.ChunkedCommandTransport.readBlock(ChunkedCommandTransport.java:39)
at hudson.remoting.AbstractSynchronousByteArrayCommandTransport.read(AbstractSynchronousByteArrayCommandTransport.java:34)
at hudson.remoting.SynchronousCommandTransport$ReaderThread.run(SynchronousCommandTransport.java:63)
Caused: hudson.remoting.ChannelClosedException: Channel "hudson.remoting.Channel@27e2f54a:H44": Remote call on H44 failed. The channel is closing down or has closed down
at hudson.remoting.Channel.call(Channel.java:991)
at hudson.FilePath.act(FilePath.java:1069)
at hudson.FilePath.act(FilePath.java:1058)
at hudson.FilePath.delete(FilePath.java:1543)
at hudson.tasks.CommandInterpreter.perform(CommandInterpreter.java:123)
at hudson.tasks.CommandInterpreter.perform(CommandInterpreter.java:66)
at hudson.tasks.BuildStepMonitor$1.perform(BuildStepMonitor.java:20)
at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:741)
at hudson.model.Build$BuildExecution.build(Build.java:206)
at hudson.model.Build$BuildExecution.doRun(Build.java:163)
at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:504)
at hudson.model.Run.execute(Run.java:1880)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at hudson.model.ResourceController.execute(ResourceController.java:97)
at hudson.model.Executor.run(Executor.java:428)
Build step 'Execute shell' marked build as failure
FATAL: Channel "hudson.remoting.Channel@27e2f54a:H44": Remote call on H44 failed. The channel is closing down or has closed down
java.io.IOException: Pipe closed after 0 cycles
at org.apache.sshd.common.channel.ChannelPipedInputStream.read(ChannelPipedInputStream.java:118)
at org.apache.sshd.common.channel.ChannelPipedInputStream.read(ChannelPipedInputStream.java:101)
at hudson.remoting.FlightRecorderInputStream.read(FlightRecorderInputStream.java:91)
at hudson.remoting.ChunkedInputStream.readHeader(ChunkedInputStream.java:73)
at hudson.remoting.ChunkedInputStream.readUntilBreak(ChunkedInputStream.java:103)
at hudson.remoting.ChunkedCommandTransport.readBlock(ChunkedCommandTransport.java:39)
at hudson.remoting.AbstractSynchronousByteArrayCommandTransport.read(AbstractSynchronousByteArrayCommandTransport.java:34)
at hudson.remoting.SynchronousCommandTransport$ReaderThread.run(SynchronousCommandTransport.java:63)
Caused: hudson.remoting.ChannelClosedException: Channel "hudson.remoting.Channel@27e2f54a:H44": Remote call on H44 failed. The channel is closing down or has closed down
at hudson.remoting.Channel.call(Channel.java:991)
at hudson.Launcher$RemoteLauncher.kill(Launcher.java:1083)
at hudson.model.AbstractBuild$AbstractBuildExecution.run(AbstractBuild.java:510)
at hudson.model.Run.execute(Run.java:1880)
at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
at hudson.model.ResourceController.execute(ResourceController.java:97)
at hudson.model.Executor.run(Executor.java:428)
Recording test results
Agent went offline during the build
ERROR: Connection was broken: java.io.IOException: Pipe closed after 0 cycles
at org.apache.sshd.common.channel.ChannelPipedInputStream.read(ChannelPipedInputStream.java:118)
at org.apache.sshd.common.channel.ChannelPipedInputStream.read(ChannelPipedInputStream.java:101)
at hudson.remoting.FlightRecorderInputStream.read(FlightRecorderInputStream.java:91)
at hudson.remoting.ChunkedInputStream.readHeader(ChunkedInputStream.java:73)
at hudson.remoting.ChunkedInputStream.readUntilBreak(ChunkedInputStream.java:103)
at hudson.remoting.ChunkedCommandTransport.readBlock(ChunkedCommandTransport.java:39)
at hudson.remoting.AbstractSynchronousByteArrayCommandTransport.read(AbstractSynchronousByteArrayCommandTransport.java:34)
at hudson.remoting.SynchronousCommandTransport$ReaderThread.run(SynchronousCommandTransport.java:63)
Build step 'Publish JUnit test result report' marked build as failure
ERROR: H44 is offline; cannot locate JDK 15 (latest)
Not sending mail to unregistered user [email protected]