Hey Jun, I think AdminTest.testShutdownBroker started failing consistently after this check-in.
-Jay On Sun, Nov 18, 2012 at 10:09 PM, Apache Jenkins Server < jenk...@builds.apache.org> wrote: > See <https://builds.apache.org/job/Kafka-0.8/113/changes> > > Changes: > > [junrao] move shutting down of fetcher thread out of critical path; > patched by Jun Rao; reviewed by Neha Narkhede; KAFKA-612 > > ------------------------------------------ > [...truncated 2862 lines...] > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.foreach(Map.scala:180) > at > scala.collection.TraversableLike$class.map(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.map(Map.scala:157) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321) > at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289) > at kafka.server.KafkaApis.handle(KafkaApis.scala:57) > at > kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41) > at java.lang.Thread.run(Thread.java:662) > [2012-11-19 06:09:14,085] ERROR [KafkaApi-0] error when processing request > (test1,-1,0,10000) (kafka.server.KafkaApis:102) > kafka.common.UnknownTopicOrPartitionException: Topic test1 partition -1 > doesn't exist on 0 > at > kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325) > at > 
kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.foreach(Map.scala:180) > at > scala.collection.TraversableLike$class.map(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.map(Map.scala:157) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321) > at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289) > at kafka.server.KafkaApis.handle(KafkaApis.scala:57) > at > kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41) > at java.lang.Thread.run(Thread.java:662) > [0m[ [0minfo [0m] [0mTest Passed: > testProduceAndMultiFetch(kafka.integration.PrimitiveApiTest) [0m > [0m[ [0minfo [0m] [0mTest Starting: > testMultiProduce(kafka.integration.PrimitiveApiTest) [0m > [0m[ [0minfo [0m] [0mTest Passed: > testMultiProduce(kafka.integration.PrimitiveApiTest) [0m > [0m[ [0minfo [0m] [0mTest Starting: > testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest) [0m > [0m[ [0minfo [0m] [0mTest Passed: > testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest) [0m > [0m[ [0minfo [0m] [0mTest Starting: > testEmptyFetchRequest(kafka.integration.PrimitiveApiTest) [0m > [0m[ [0minfo [0m] [0mTest Passed: > testEmptyFetchRequest(kafka.integration.PrimitiveApiTest) [0m > [0m[ [0minfo [0m] [0mTest Starting: > testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest) [0m > [0m[ [0minfo [0m] [0mTest Passed: > testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest) [0m > [0m[ [0minfo [0m] [0mTest Starting: > testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest) > [0m > [0m[ [0minfo [0m] [0mTest Passed: > 
testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest) > [0m > [0m[ [0minfo [0m] [0mTest Starting: > testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest) > [0m > [2012-11-19 06:09:18,635] ERROR [KafkaApi-0] error when processing request > (test2,0,-1,10000) (kafka.server.KafkaApis:102) > kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only > have log segments in the range 0 to 2. > at kafka.log.Log.read(Log.scala:371) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.foreach(Map.scala:180) > at > scala.collection.TraversableLike$class.map(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.map(Map.scala:157) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321) > at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289) > at kafka.server.KafkaApis.handle(KafkaApis.scala:57) > at > kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41) > at java.lang.Thread.run(Thread.java:662) > [2012-11-19 06:09:18,636] ERROR [KafkaApi-0] error when processing request > (test3,0,-1,10000) (kafka.server.KafkaApis:102) > kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only > have log segments in the range 0 to 2. 
> at kafka.log.Log.read(Log.scala:371) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.foreach(Map.scala:180) > at > scala.collection.TraversableLike$class.map(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.map(Map.scala:157) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321) > at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289) > at kafka.server.KafkaApis.handle(KafkaApis.scala:57) > at > kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41) > at java.lang.Thread.run(Thread.java:662) > [2012-11-19 06:09:18,636] ERROR [KafkaApi-0] error when processing request > (test4,0,-1,10000) (kafka.server.KafkaApis:102) > kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only > have log segments in the range 0 to 2. 
> at kafka.log.Log.read(Log.scala:371) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.foreach(Map.scala:180) > at > scala.collection.TraversableLike$class.map(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.map(Map.scala:157) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321) > at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289) > at kafka.server.KafkaApis.handle(KafkaApis.scala:57) > at > kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41) > at java.lang.Thread.run(Thread.java:662) > [2012-11-19 06:09:18,637] ERROR [KafkaApi-0] error when processing request > (test1,0,-1,10000) (kafka.server.KafkaApis:102) > kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only > have log segments in the range 0 to 2. 
> at kafka.log.Log.read(Log.scala:371) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.foreach(Map.scala:180) > at > scala.collection.TraversableLike$class.map(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.map(Map.scala:157) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321) > at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289) > at kafka.server.KafkaApis.handle(KafkaApis.scala:57) > at > kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41) > at java.lang.Thread.run(Thread.java:662) > [2012-11-19 06:09:18,639] ERROR [KafkaApi-0] error when processing request > (test2,-1,0,10000) (kafka.server.KafkaApis:102) > kafka.common.UnknownTopicOrPartitionException: Topic test2 partition -1 > doesn't exist on 0 > at > kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.foreach(Map.scala:180) > at > scala.collection.TraversableLike$class.map(TraversableLike.scala:206) > at 
scala.collection.immutable.Map$Map4.map(Map.scala:157) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321) > at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289) > at kafka.server.KafkaApis.handle(KafkaApis.scala:57) > at > kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41) > at java.lang.Thread.run(Thread.java:662) > [2012-11-19 06:09:18,640] ERROR [KafkaApi-0] error when processing request > (test3,-1,0,10000) (kafka.server.KafkaApis:102) > kafka.common.UnknownTopicOrPartitionException: Topic test3 partition -1 > doesn't exist on 0 > at > kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.foreach(Map.scala:180) > at > scala.collection.TraversableLike$class.map(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.map(Map.scala:157) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321) > at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289) > at kafka.server.KafkaApis.handle(KafkaApis.scala:57) > at > kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41) > at java.lang.Thread.run(Thread.java:662) > [2012-11-19 06:09:18,640] ERROR [KafkaApi-0] error when processing request > (test4,-1,0,10000) (kafka.server.KafkaApis:102) > kafka.common.UnknownTopicOrPartitionException: Topic test4 partition -1 > doesn't exist on 0 > at > 
kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.foreach(Map.scala:180) > at > scala.collection.TraversableLike$class.map(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.map(Map.scala:157) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321) > at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289) > at kafka.server.KafkaApis.handle(KafkaApis.scala:57) > at > kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41) > at java.lang.Thread.run(Thread.java:662) > [2012-11-19 06:09:18,641] ERROR [KafkaApi-0] error when processing request > (test1,-1,0,10000) (kafka.server.KafkaApis:102) > kafka.common.UnknownTopicOrPartitionException: Topic test1 partition -1 > doesn't exist on 0 > at > kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325) > at > kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at > scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.foreach(Map.scala:180) > at > 
scala.collection.TraversableLike$class.map(TraversableLike.scala:206) > at scala.collection.immutable.Map$Map4.map(Map.scala:157) > at > kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321) > at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289) > at kafka.server.KafkaApis.handle(KafkaApis.scala:57) > at > kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41) > at java.lang.Thread.run(Thread.java:662) > [0m[ [0minfo [0m] [0mTest Passed: > testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest) > [0m > [0m[ [0minfo [0m] [0mTest Starting: > testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest) [0m > [0m[ [0minfo [0m] [0mTest Passed: > testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest) [0m > [0m[ [0minfo [0m] [0mTest Starting: > testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest) [0m > [0m[ [0minfo [0m] [0mTest Passed: > testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest) [0m > [0m[ [0minfo [0m] [34m== core-kafka / kafka.integration.PrimitiveApiTest > == [0m > [0m[ [0minfo [0m] [34m [0m > [0m[ [0minfo [0m] [34m== core-kafka / kafka.log4j.KafkaLog4jAppenderTest > == [0m > [0m[ [0minfo [0m] [0mTest Starting: > testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest) [0m > log4j:WARN No appenders could be found for logger > (org.I0Itec.zkclient.ZkEventThread). > log4j:WARN Please initialize the log4j system properly. 
> [0m[ [0minfo [0m] [0mTest Passed: > testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest) [0m > [0m[ [0minfo [0m] [0mTest Starting: > testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest) [0m > [0m[ [0minfo [0m] [0mTest Passed: > testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest) [0m > [0m[ [0minfo [0m] [34m== core-kafka / kafka.log4j.KafkaLog4jAppenderTest > == [0m > [0m[ [0minfo [0m] [34m [0m > [0m[ [0minfo [0m] [34m== core-kafka / kafka.metrics.KafkaTimerTest == [0m > [0m[ [0minfo [0m] [0mTest Starting: > testKafkaTimer(kafka.metrics.KafkaTimerTest) [0m > [0m[ [0minfo [0m] [0mTest Passed: > testKafkaTimer(kafka.metrics.KafkaTimerTest) [0m > [0m[ [0minfo [0m] [34m== core-kafka / kafka.metrics.KafkaTimerTest == [0m > [0m[ [0minfo [0m] [34m [0m > [0m[ [0minfo [0m] [34m== core-kafka / > kafka.message.ByteBufferMessageSetTest == [0m > [0m[ [0minfo [0m] [0mTest Starting: testWrittenEqualsRead [0m > [0m[ [0minfo [0m] [0mTest Passed: testWrittenEqualsRead [0m > [0m[ [0minfo [0m] [0mTest Starting: testIteratorIsConsistent [0m > [0m[ [0minfo [0m] [0mTest Passed: testIteratorIsConsistent [0m > [0m[ [0minfo [0m] [0mTest Starting: testSizeInBytes [0m > [0m[ [0minfo [0m] [0mTest Passed: testSizeInBytes [0m > [0m[ [0minfo [0m] [0mTest Starting: testEquals [0m > [0m[ [0minfo [0m] [0mTest Passed: testEquals [0m > [0m[ [0minfo [0m] [0mTest Starting: testWriteTo [0m > [0m[ [0minfo [0m] [0mTest Passed: testWriteTo [0m > [0m[ [0minfo [0m] [0mTest Starting: testValidBytes [0m > [0m[ [0minfo [0m] [0mTest Passed: testValidBytes [0m > [0m[ [0minfo [0m] [0mTest Starting: testValidBytesWithCompression [0m > [0m[ [0minfo [0m] [0mTest Passed: testValidBytesWithCompression [0m > [0m[ [0minfo [0m] [0mTest Starting: testIterator [0m > [0m[ [0minfo [0m] [0mTest Passed: testIterator [0m > [0m[ [0minfo [0m] [0mTest Starting: testOffsetAssignment [0m > [0m[ [0minfo [0m] [0mTest Passed: testOffsetAssignment [0m > [0m[ [0minfo [0m] [34m== core-kafka / > kafka.message.ByteBufferMessageSetTest 
== [0m > [0m[ [0minfo [0m] [34m [0m > [0m[ [0minfo [0m] [34m== core-kafka / kafka.producer.ProducerTest == [0m > [0m[ [0minfo [0m] [0mTest Starting: > testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest) [0m > [0m[ [0minfo [0m] [0mTest Passed: > testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest) [0m > [0m[ [0minfo [0m] [0mTest Starting: > testSendToNewTopic(kafka.producer.ProducerTest) [0m > [0m[ [0minfo [0m] [0mTest Passed: > testSendToNewTopic(kafka.producer.ProducerTest) [0m > [0m[ [0minfo [0m] [0mTest Starting: > testSendWithDeadBroker(kafka.producer.ProducerTest) [0m > [0m[ [0minfo [0m] [0mTest Passed: > testSendWithDeadBroker(kafka.producer.ProducerTest) [0m > [0m[ [0minfo [0m] [0mTest Starting: > testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest) [0m > [0m[ [0minfo [0m] [0mTest Passed: > testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest) [0m > [0m[ [0minfo [0m] [34m== core-kafka / kafka.producer.ProducerTest == [0m > [0m[ [0minfo [0m] [34m [0m > [0m[ [0minfo [0m] [34m== core-kafka / kafka.integration.FetcherTest == > [0m > [0m[ [0minfo [0m] [0mTest Starting: > testFetcher(kafka.integration.FetcherTest) [0m > [0m[ [0minfo [0m] [0mTest Passed: > testFetcher(kafka.integration.FetcherTest) [0m > [0m[ [0minfo [0m] [34m== core-kafka / kafka.integration.FetcherTest == > [0m > [0m[ [0minfo [0m] [34m [0m > [0m[ [0minfo [0m] [34m== core-kafka / Test cleanup 1 == [0m > [0m[ [0minfo [0m] [0mDeleting directory /tmp/sbt_b5aaac46 [0m > [0m[ [0minfo [0m] [34m== core-kafka / Test cleanup 1 == [0m > [0m[ [0minfo [0m] [34m [0m > [0m[ [0minfo [0m] [34m== core-kafka / test-finish == [0m > [0m[ [31merror [0m] [0mFailed: : Total 167, Failed 2, Errors 0, Passed > 165, Skipped 0 [0m > [0m[ [0minfo [0m] [34m== core-kafka / test-finish == [0m > [0m[ [0minfo [0m] [34m [0m > [0m[ [0minfo [0m] [34m== core-kafka / test-cleanup == [0m > [0m[ [0minfo [0m] [34m== core-kafka / test-cleanup == [0m > [0m[ [0minfo [0m] [34m [0m > [0m[ [0minfo 
[0m] [34m== hadoop consumer / copy-test-resources == [0m > [0m[ [0minfo [0m] [34m== hadoop consumer / copy-test-resources == [0m > [0m[ [31merror [0m] [0mError running kafka.server.LogRecoveryTest: Test > FAILED [0m > [0m[ [31merror [0m] [0mError running kafka.admin.AdminTest: Test FAILED > [0m > [0m[ [31merror [0m] [0mError running test: One or more subtasks failed > [0m > [0m[ [0minfo [0m] [0m [0m > [0m[ [0minfo [0m] [0mTotal time: 233 s, completed Nov 19, 2012 6:09:42 > AM [0m > [0m[ [0minfo [0m] [0m [0m > [0m[ [0minfo [0m] [0mTotal session time: 233 s, completed Nov 19, 2012 > 6:09:42 AM [0m > [0m[ [31merror [0m] [0mError during build. [0m > Build step 'Execute shell' marked build as failure >