Kay Ousterhout created SPARK-19989:
--------------------------------------

             Summary: Flaky Test: org.apache.spark.sql.kafka010.KafkaSourceStressSuite
                 Key: SPARK-19989
                 URL: https://issues.apache.org/jira/browse/SPARK-19989
             Project: Spark
          Issue Type: Bug
          Components: SQL, Tests
    Affects Versions: 2.2.0
            Reporter: Kay Ousterhout
            Priority: Minor


This test failed recently here:
https://amplab.cs.berkeley.edu/jenkins//job/SparkPullRequestBuilder/74683/testReport/junit/org.apache.spark.sql.kafka010/KafkaSourceStressSuite/stress_test_with_multiple_topics_and_partitions/

Based on Josh's dashboard (https://spark-tests.appspot.com/test-details?suite_name=org.apache.spark.sql.kafka010.KafkaSourceStressSuite&test_name=stress+test+with+multiple+topics+and+partitions), it seems to fail a few times every month. Here's the full error from the most recent failure:

Error Message

org.scalatest.exceptions.TestFailedException: Error adding data: replication factor: 1 larger than available brokers: 0
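For context on the error: the stress test repeatedly creates, deletes, and writes to topics through the embedded single-broker Kafka cluster in KafkaTestUtils, and topic creation goes through kafka.admin.AdminUtils. AdminUtils refuses to assign replicas when it sees no live brokers, so "replication factor: 1 larger than available brokers: 0" means that at the moment createTopic was called the test's lone broker was not (or no longer) registered. The snippet below is a minimal, self-contained sketch of that check for illustration only; assignReplicas and TopicCreationException are made-up names, not Kafka's actual AdminUtils code.

// Illustrative sketch only: models the broker-availability check that fails in
// AdminUtils.assignReplicasToBrokers (not the real Kafka implementation).
final case class TopicCreationException(msg: String) extends RuntimeException(msg)

object ReplicaAssignmentSketch {
  /** Assign `replicationFactor` replicas per partition across `brokerIds`,
    * failing the same way the stress test did when no brokers are registered. */
  def assignReplicas(brokerIds: Seq[Int], partitions: Int, replicationFactor: Int): Map[Int, Seq[Int]] = {
    if (replicationFactor > brokerIds.size) {
      throw TopicCreationException(
        s"replication factor: $replicationFactor larger than available brokers: ${brokerIds.size}")
    }
    // Simple round-robin placement; enough to show the happy path.
    (0 until partitions).map { p =>
      p -> (0 until replicationFactor).map(r => brokerIds((p + r) % brokerIds.size))
    }.toMap
  }

  def main(args: Array[String]): Unit = {
    // One registered broker, as in the test harness: succeeds.
    println(assignReplicas(brokerIds = Seq(0), partitions = 2, replicationFactor = 1))
    // assignReplicas(Seq.empty, 2, 1) would throw:
    // "replication factor: 1 larger than available brokers: 0"
  }
}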
Stacktrace

sbt.ForkMain$ForkError: org.scalatest.exceptions.TestFailedException: Error adding data: replication factor: 1 larger than available brokers: 0
        kafka.admin.AdminUtils$.assignReplicasToBrokers(AdminUtils.scala:117)
        kafka.admin.AdminUtils$.createTopic(AdminUtils.scala:403)
        org.apache.spark.sql.kafka010.KafkaTestUtils.createTopic(KafkaTestUtils.scala:173)
        org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$anonfun$16$$anonfun$apply$mcV$sp$17$$anonfun$37.apply(KafkaSourceSuite.scala:903)
        org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$anonfun$16$$anonfun$apply$mcV$sp$17$$anonfun$37.apply(KafkaSourceSuite.scala:901)
        org.apache.spark.sql.kafka010.KafkaSourceTest$AddKafkaData$$anonfun$addData$1.apply(KafkaSourceSuite.scala:93)
        org.apache.spark.sql.kafka010.KafkaSourceTest$AddKafkaData$$anonfun$addData$1.apply(KafkaSourceSuite.scala:92)
        scala.collection.immutable.HashSet$HashSet1.foreach(HashSet.scala:316)
        org.apache.spark.sql.kafka010.KafkaSourceTest$AddKafkaData.addData(KafkaSourceSuite.scala:92)
        org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1.apply(StreamTest.scala:494)


== Progress ==
   AssertOnQuery(<condition>, )
   CheckAnswer: 
   StopStream
   
StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@5d888be0,Map())
   AddKafkaData(topics = Set(stress4, stress2, stress1, stress5, stress3), data 
= Range(0, 1, 2, 3, 4, 5, 6, 7, 8), message = )
   CheckAnswer: [1],[2],[3],[4],[5],[6],[7],[8],[9]
   StopStream
   
StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@1be724ee,Map())
   AddKafkaData(topics = Set(stress4, stress2, stress1, stress5, stress3), data 
= Range(9, 10, 11, 12, 13, 14), message = )
   CheckAnswer: 
[1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15]
   StopStream
   AddKafkaData(topics = Set(stress4, stress2, stress1, stress5, stress3), data 
= Range(), message = )
=> AddKafkaData(topics = Set(stress4, stress6, stress2, stress1, stress5, 
stress3), data = Range(15), message = Add topic stress7)
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress1, stress5, 
stress3), data = Range(16, 17, 18, 19, 20, 21, 22), message = Add partition)
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress1, stress5, 
stress3), data = Range(23, 24), message = Add partition)
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3), data = Range(), message = Add topic stress9)
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3), data = Range(25, 26, 27, 28, 29, 30, 31, 32, 33), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3), data = Range(), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3), data = Range(), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3), data = Range(34, 35, 36, 37, 38, 39), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3), data = Range(40, 41, 42, 43), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3), data = Range(44), message = Add partition)
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3), data = Range(45, 46, 47, 48, 49, 50, 51, 52), message = Add 
partition)
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3), data = Range(53, 54, 55), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3), data = Range(56, 57, 58, 59, 60, 61, 62, 63), message = Add 
partition)
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3), data = Range(64, 65, 66, 67, 68, 69, 70), message = )
   
StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@65068637,Map())
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3, stress10), data = Range(71, 72, 73, 74, 75, 76, 77, 78, 79), 
message = Add topic stress11)
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3, stress10), data = Range(80, 81, 82), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3, stress10), data = Range(83), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress3, stress10), data = Range(84), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress10), data = Range(85, 86), message = Delete topic stress3)
   AddKafkaData(topics = Set(stress4, stress6, stress2, stress8, stress1, 
stress5, stress10), data = Range(87, 88, 89, 90, 91, 92, 93, 94, 95), message = 
)
   CheckAnswer: 
[1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96]
   StopStream
   
StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@73e0e11f,Map())
   CheckAnswer: 
[1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96]
   StopStream
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, 
stress1, stress5, stress10), data = Range(96, 97), message = Add topic stress13)
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, 
stress1, stress5, stress10), data = Range(98, 99, 100, 101), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, 
stress1, stress5, stress10), data = Range(102, 103, 104, 105, 106, 107, 108), 
message = )
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, 
stress1, stress5, stress10), data = Range(109, 110, 111, 112, 113, 114), 
message = )
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, 
stress1, stress5, stress10), data = Range(115, 116, 117, 118, 119, 120, 121, 
122), message = )
   AddKafkaData(topics = Set(stress4, stress6, stress12, stress2, stress8, 
stress1, stress5, stress10), data = Range(123, 124, 125, 126, 127, 128), 
message = )
   
StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@13254b5c,Map())
   AddKafkaData(topics = Set(stress14, stress4, stress6, stress12, stress2, 
stress8, stress1, stress5, stress10), data = Range(129), message = Add topic 
stress15)
   AddKafkaData(topics = Set(stress14, stress4, stress6, stress12, stress2, 
stress8, stress1, stress5), data = Range(130, 131, 132, 133, 134, 135, 136), 
message = Delete topic stress10)
   CheckAnswer: 
[1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124],[125],[126],[127],[128],[129],[130],[131],[132],[133],[134],[135],[136],[137]
   CheckAnswer: 
[1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124],[125],[126],[127],[128],[129],[130],[131],[132],[133],[134],[135],[136],[137]
   StopStream
   
StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@2bd2f9f,Map())
   AddKafkaData(topics = Set(stress14, stress4, stress6, stress12, stress2, 
stress8, stress1, stress5), data = Range(), message = )
   AddKafkaData(topics = Set(stress14, stress4, stress6, stress12, stress2, 
stress8, stress1, stress5), data = Range(137), message = )
   CheckAnswer: 
[1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124],[125],[126],[127],[128],[129],[130],[131],[132],[133],[134],[135],[136],[137],[138]
   StopStream
   AddKafkaData(topics = Set(stress14, stress4, stress6, stress12, stress2, 
stress8, stress1, stress5), data = Range(138), message = )
   AddKafkaData(topics = Set(stress14, stress4, stress6, stress12, stress2, 
stress8, stress1, stress5), data = Range(139), message = )
   AddKafkaData(topics = Set(stress14, stress4, stress6, stress12, stress2, 
stress8, stress1, stress5, stress16), data = Range(140, 141, 142, 143, 144, 
145, 146), message = Add topic stress17)
   
StartStream(ProcessingTime(0),org.apache.spark.util.SystemClock@535b36c1,Map())
   CheckAnswer: 
[1],[2],[3],[4],[5],[6],[7],[8],[9],[10],[11],[12],[13],[14],[15],[16],[17],[18],[19],[20],[21],[22],[23],[24],[25],[26],[27],[28],[29],[30],[31],[32],[33],[34],[35],[36],[37],[38],[39],[40],[41],[42],[43],[44],[45],[46],[47],[48],[49],[50],[51],[52],[53],[54],[55],[56],[57],[58],[59],[60],[61],[62],[63],[64],[65],[66],[67],[68],[69],[70],[71],[72],[73],[74],[75],[76],[77],[78],[79],[80],[81],[82],[83],[84],[85],[86],[87],[88],[89],[90],[91],[92],[93],[94],[95],[96],[97],[98],[99],[100],[101],[102],[103],[104],[105],[106],[107],[108],[109],[110],[111],[112],[113],[114],[115],[116],[117],[118],[119],[120],[121],[122],[123],[124],[125],[126],[127],[128],[129],[130],[131],[132],[133],[134],[135],[136],[137],[138],[139],[140],[141],[142],[143],[144],[145],[146],[147]

== Stream ==
Output Mode: Append
Stream state: not started
Thread state: dead


== Sink ==
0: 
1: [1]
2: [6] [2] [3] [5] [4]
3: [7] [8] [9]
4: [10]
5: [13] [12] [15] [11] [14]


== Plan ==

         
         
        at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:495)
        at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1555)
        at org.scalatest.Assertions$class.fail(Assertions.scala:1328)
        at org.scalatest.FunSuite.fail(FunSuite.scala:1555)
        at org.apache.spark.sql.streaming.StreamTest$class.failTest$1(StreamTest.scala:347)
        at org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1.apply(StreamTest.scala:520)
        at org.apache.spark.sql.streaming.StreamTest$$anonfun$liftedTree1$1$1.apply(StreamTest.scala:357)
        at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
        at org.apache.spark.sql.streaming.StreamTest$class.liftedTree1$1(StreamTest.scala:357)
        at org.apache.spark.sql.streaming.StreamTest$class.testStream(StreamTest.scala:356)
        at org.apache.spark.sql.kafka010.KafkaSourceTest.testStream(KafkaSourceSuite.scala:45)
        at org.apache.spark.sql.streaming.StreamTest$class.runStressTest(StreamTest.scala:649)
        at org.apache.spark.sql.kafka010.KafkaSourceTest.runStressTest(KafkaSourceSuite.scala:45)
        at org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$anonfun$16.apply$mcV$sp(KafkaSourceSuite.scala:893)
        at org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$anonfun$16.apply(KafkaSourceSuite.scala:873)
        at org.apache.spark.sql.kafka010.KafkaSourceStressSuite$$anonfun$16.apply(KafkaSourceSuite.scala:873)
        at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
        at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
        at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
        at org.scalatest.Transformer.apply(Transformer.scala:22)
        at org.scalatest.Transformer.apply(Transformer.scala:20)
        at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
        at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:68)
        at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
        at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
        at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
        at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
        at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
        at org.apache.spark.sql.kafka010.KafkaSourceTest.org$scalatest$BeforeAndAfterEach$$super$runTest(KafkaSourceSuite.scala:45)
        at org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:255)
        at org.apache.spark.sql.kafka010.KafkaSourceTest.runTest(KafkaSourceSuite.scala:45)
        at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
        at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
        at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
        at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
        at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
        at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
        at org.scalatest.Suite$class.run(Suite.scala:1424)
        at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
        at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
        at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
        at org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:31)
        at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
        at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
        at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:31)
        at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:357)
        at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:502)
        at sbt.ForkMain$Run$2.call(ForkMain.java:296)
        at sbt.ForkMain$Run$2.call(ForkMain.java:286)
        at java.util.concurrent.FutureTask.run(FutureTask.java:266)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
        at java.lang.Thread.run(Thread.java:745)
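Given the stack above, the flakiness looks like a race between the embedded broker being registered (or re-registered after the add/delete-topic churn) and the stress test asking KafkaTestUtils.createTopic for the next topic. One possible hardening, sketched below purely as an illustration, would be to wait until at least one broker is registered before creating the topic. Everything in the sketch (retryUntil, the registeredBrokers counter, the simulated registration thread) is hypothetical scaffolding, not an existing Spark or Kafka API.

import java.util.concurrent.atomic.AtomicInteger
import scala.annotation.tailrec

// Illustrative sketch only: a generic wait/retry helper a test utility could
// use to avoid creating topics while no broker is registered yet.
object FlakyTopicCreationSketch {

  /** Run `action` once `precondition` holds, or once `timeoutMs` has elapsed. */
  @tailrec
  def retryUntil[T](precondition: () => Boolean, timeoutMs: Long, intervalMs: Long = 100L)(action: => T): T = {
    if (precondition() || timeoutMs <= 0) action
    else {
      Thread.sleep(intervalMs)
      retryUntil(precondition, timeoutMs - intervalMs, intervalMs)(action)
    }
  }

  def main(args: Array[String]): Unit = {
    // Hypothetical stand-in for "number of currently registered brokers"; in the
    // real suite this information would come from the embedded Kafka/ZooKeeper harness.
    val registeredBrokers = new AtomicInteger(0)

    // Simulate a broker that finishes registering 300 ms after the test wants a new topic.
    new Thread(new Runnable {
      def run(): Unit = { Thread.sleep(300); registeredBrokers.set(1) }
    }).start()

    retryUntil(() => registeredBrokers.get() >= 1, timeoutMs = 5000L) {
      require(registeredBrokers.get() >= 1,
        s"replication factor: 1 larger than available brokers: ${registeredBrokers.get()}")
      println("topic created") // stand-in for the real createTopic call
    }
  }
}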




