Please help with this.
Thanks
Amit

On Fri, Jul 17, 2020 at 9:10 AM Amit Sharma <resolve...@gmail.com> wrote:

> Hi, sometimes my Spark streaming job throws this exception: Futures timed
> out after [300 seconds].
> I am not sure where the default timeout configuration is. Can I increase
> it? Please help.
>
> Thanks
> Amit
>
> Caused by: java.util.concurrent.TimeoutException: Futures timed out after [300 seconds]
>     at scala.concurrent.impl.Promise$DefaultPromise.ready(Promise.scala:219)
>     at scala.concurrent.impl.Promise$DefaultPromise.result(Promise.scala:223)
>     at org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:201)
>     at org.apache.spark.sql.execution.exchange.BroadcastExchangeExec.doExecuteBroadcast(BroadcastExchangeExec.scala:136)
>     at org.apache.spark.sql.execution.InputAdapter.doExecuteBroadcast(WholeStageCodegenExec.scala:372)
>     at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeBroadcast$1.apply(SparkPlan.scala:144)
>     at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeBroadcast$1.apply(SparkPlan.scala:140)
>     at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
>     at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>     at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
>     at org.apache.spark.sql.execution.SparkPlan.executeBroadcast(SparkPlan.scala:140)
>     at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.prepareBroadcast(BroadcastHashJoinExec.scala:116)
>     at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenOuter(BroadcastHashJoinExec.scala:257)
>     at org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:101)
>     at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:186)
>     at org.apache.spark.sql.execution.ProjectExec.consume(basicPhysicalOperators.scala:35)
>     at org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:65)
>     at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:186)
>     at org.apache.spark.sql.execution.SerializeFromObjectExec.consume(objects.scala:101)
>     at org.apache.spark.sql.execution.SerializeFromObjectExec.doConsume(objects.scala:121)
>     at org.apache.spark.sql.execution.CodegenSupport$class.constructDoConsumeFunction(WholeStageCodegenExec.scala:213)
>     at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:184)
>     at org.apache.spark.sql.execution.MapElementsExec.consume(objects.scala:200)
>     at org.apache.spark.sql.execution.MapElementsExec.doConsume(objects.scala:224)
>     at org.apache.spark.sql.execution.CodegenSupport$class.constructDoConsumeFunction(WholeStageCodegenExec.scala:213)
>     at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:184)
>     at org.apache.spark.sql.execution.DeserializeToObjectExec.consume(objects.scala:68)
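For reference: the trace points at BroadcastExchangeExec, whose wait is controlled by spark.sql.broadcastTimeout (300 seconds by default), so that is most likely the timeout in play here. A minimal sketch of raising it, assuming that is indeed the cause; the object name, the master setting, and the value 600 are illustrative, not anything from the original job:

  import org.apache.spark.sql.SparkSession

  object BroadcastTimeoutExample {
    def main(args: Array[String]): Unit = {
      // Raise spark.sql.broadcastTimeout (in seconds; default 300)
      // when building the session.
      val spark = SparkSession.builder()
        .appName("broadcast-timeout-example")
        .master("local[*]") // for a local test run only
        .config("spark.sql.broadcastTimeout", "600")
        .getOrCreate()

      // Or change it on an existing session at runtime:
      spark.conf.set("spark.sql.broadcastTimeout", "600")
    }
  }

If the broadcast side of the join is simply too large to build within any reasonable timeout, another option is to disable automatic broadcast joins by setting spark.sql.autoBroadcastJoinThreshold to -1, at the cost of falling back to a shuffle-based join.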