for (i <- 0 until distUsers.length) { val subsetData = sqlContext.sql("SELECT bidder_id, t.auction, time from BidsTable b inner join (select distinct auction from BidsTable where bidder_id='"+distUsers(i)+"') t on t.auction=b.auction order by t.auction, time").map(x=>(x(0),x(1),x(2))) val withIndex = subsetData.zipWithIndex val indexKey = withIndex.map{case (k,v) => (v,k)} indexKey.cache val timeDiff = new ListBuffer[Long]() for (j <- 1 until subsetData.count().toInt) { var current_auction = indexKey.lookup(j).map(_._2).head.toString() var past_auction = indexKey.lookup(j-1).map(_._2).head.toString() var current = indexKey.lookup(j).map(_._1).head.toString() var past = indexKey.lookup(j-1).map(_._1).head.toString() if(current.toString != past.toString && current.toString == distUsers(0) && current_auction==past_auction) { var current_time = indexKey.lookup(9).map(_._3).head.toString() var past_time = indexKey.lookup(0).map(_._3).head.toString() timeDiff += current_time.toLong - past_time.toLong } } bidder_timeDiff+= ((distUsers(0).toString,timeDiff.min.toString,timeDiff.min.toString)) }
Above is running infinitely... Kindly help on this. Regards, Ladle -- View this message in context: http://apache-spark-user-list.1001560.n3.nabble.com/Spark-program-running-infinitely-tp23565.html Sent from the Apache Spark User List mailing list archive at Nabble.com. --------------------------------------------------------------------- To unsubscribe, e-mail: user-unsubscr...@spark.apache.org For additional commands, e-mail: user-h...@spark.apache.org