[ https://issues.apache.org/jira/browse/SPARK-19875?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15903584#comment-15903584 ]

Kazuaki Ishizaki commented on SPARK-19875:
------------------------------------------

I got the following stack trace. The hang appears to occur in constraint propagation. When I applied SPARK-19846, the 50-column csv dataset no longer got stuck (see the sketch after the stack trace).

{code}
"ScalaTest-run-running-Test" #1 prio=5 os_prio=0 tid=0x0000000002a41000 
nid=0x1bb4c runnable [0x0000000002d5b000]
   java.lang.Thread.State: RUNNABLE
        at 
scala.collection.mutable.FlatHashTable$class.addEntry(FlatHashTable.scala:148)
        at scala.collection.mutable.HashSet.addEntry(HashSet.scala:40)
        at 
scala.collection.mutable.FlatHashTable$class.growTable(FlatHashTable.scala:225)
        at 
scala.collection.mutable.FlatHashTable$class.addEntry(FlatHashTable.scala:159)
        at scala.collection.mutable.HashSet.addEntry(HashSet.scala:40)
        at 
scala.collection.mutable.FlatHashTable$class.addElem(FlatHashTable.scala:139)
        at scala.collection.mutable.HashSet.addElem(HashSet.scala:40)
        at scala.collection.mutable.HashSet.$plus$eq(HashSet.scala:59)
        at scala.collection.mutable.HashSet.$plus$eq(HashSet.scala:40)
        at 
scala.collection.generic.Growable$$anonfun$$plus$plus$eq$1.apply(Growable.scala:59)
        at 
scala.collection.generic.Growable$$anonfun$$plus$plus$eq$1.apply(Growable.scala:59)
        at scala.collection.mutable.HashSet.foreach(HashSet.scala:78)
        at 
scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
        at scala.collection.mutable.AbstractSet.$plus$plus$eq(Set.scala:46)
        at scala.collection.mutable.HashSet.clone(HashSet.scala:83)
        at scala.collection.mutable.HashSet.clone(HashSet.scala:40)
        at 
org.apache.spark.sql.catalyst.expressions.ExpressionSet.$plus(ExpressionSet.scala:65)
        at 
org.apache.spark.sql.catalyst.expressions.ExpressionSet.$plus(ExpressionSet.scala:50)
        at 
scala.collection.SetLike$$anonfun$$plus$plus$1.apply(SetLike.scala:141)
        at 
scala.collection.SetLike$$anonfun$$plus$plus$1.apply(SetLike.scala:141)
        at 
scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
        at 
scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:157)
        at 
scala.collection.immutable.HashSet$HashSet1.foreach(HashSet.scala:316)
        at 
scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:972)
        at 
scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:972)
        at 
scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:972)
        at 
scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:157)
        at scala.collection.AbstractTraversable.foldLeft(Traversable.scala:104)
        at 
scala.collection.TraversableOnce$class.$div$colon(TraversableOnce.scala:151)
        at 
scala.collection.AbstractTraversable.$div$colon(Traversable.scala:104)
        at scala.collection.SetLike$class.$plus$plus(SetLike.scala:141)
        at 
org.apache.spark.sql.catalyst.expressions.ExpressionSet.$plus$plus(ExpressionSet.scala:50)
        at 
org.apache.spark.sql.catalyst.plans.logical.UnaryNode$$anonfun$getAliasedConstraints$1.apply(LogicalPlan.scala:325)
        at 
org.apache.spark.sql.catalyst.plans.logical.UnaryNode$$anonfun$getAliasedConstraints$1.apply(LogicalPlan.scala:322)
        at 
scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
        at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
        at 
org.apache.spark.sql.catalyst.plans.logical.UnaryNode.getAliasedConstraints(LogicalPlan.scala:322)
        at 
org.apache.spark.sql.catalyst.plans.logical.Project.validConstraints(basicLogicalOperators.scala:57)
        at 
org.apache.spark.sql.catalyst.plans.QueryPlan.constraints$lzycompute(QueryPlan.scala:187)
        - locked <0x0000000682bd9118> (a 
org.apache.spark.sql.catalyst.plans.logical.Project)
        at 
org.apache.spark.sql.catalyst.plans.QueryPlan.constraints(QueryPlan.scala:187)
        at 
org.apache.spark.sql.catalyst.plans.logical.Filter.validConstraints(basicLogicalOperators.scala:130)
        at 
org.apache.spark.sql.catalyst.plans.QueryPlan.constraints$lzycompute(QueryPlan.scala:187)
        - locked <0x0000000682bd9188> (a 
org.apache.spark.sql.catalyst.plans.logical.Filter)
        at 
org.apache.spark.sql.catalyst.plans.QueryPlan.constraints(QueryPlan.scala:187)
        at 
org.apache.spark.sql.catalyst.optimizer.InferFiltersFromConstraints$$anonfun$apply$13.applyOrElse(Optimizer.scala:612)
        at 
org.apache.spark.sql.catalyst.optimizer.InferFiltersFromConstraints$$anonfun$apply$13.applyOrElse(Optimizer.scala:610)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$2.apply(TreeNode.scala:267)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$2.apply(TreeNode.scala:267)
        at 
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:70)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:266)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:272)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:272)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:306)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:304)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:272)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:272)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformDown$1.apply(TreeNode.scala:272)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:306)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:187)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.mapChildren(TreeNode.scala:304)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:272)
        at 
org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:256)
        at 
org.apache.spark.sql.catalyst.optimizer.InferFiltersFromConstraints$.apply(Optimizer.scala:610)
        at 
org.apache.spark.sql.catalyst.optimizer.InferFiltersFromConstraints$.apply(Optimizer.scala:609)
        at 
org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:85)
        at 
org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:82)
        at 
scala.collection.IndexedSeqOptimized$class.foldl(IndexedSeqOptimized.scala:57)
        at 
scala.collection.IndexedSeqOptimized$class.foldLeft(IndexedSeqOptimized.scala:66)
        at scala.collection.mutable.WrappedArray.foldLeft(WrappedArray.scala:35)
        at 
org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:82)
        at 
org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:74)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at 
org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:74)
        at 
org.apache.spark.sql.execution.QueryExecution.optimizedPlan$lzycompute(QueryExecution.scala:74)
        - locked <0x0000000682bd9438> (a 
org.apache.spark.sql.execution.QueryExecution)
        at 
org.apache.spark.sql.execution.QueryExecution.optimizedPlan(QueryExecution.scala:74)
        at 
org.apache.spark.sql.execution.QueryExecution.sparkPlan$lzycompute(QueryExecution.scala:80)
        - locked <0x0000000682bd9438> (a 
org.apache.spark.sql.execution.QueryExecution)
        at 
org.apache.spark.sql.execution.QueryExecution.sparkPlan(QueryExecution.scala:76)
        at 
org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:85)
        - locked <0x0000000682bd9438> (a 
org.apache.spark.sql.execution.QueryExecution)
        at 
org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:85)
        at org.apache.spark.sql.Dataset.withAction(Dataset.scala:2786)
        at org.apache.spark.sql.Dataset.head(Dataset.scala:2123)
        at org.apache.spark.sql.Dataset.take(Dataset.scala:2336)
        at org.apache.spark.sql.Dataset.showString(Dataset.scala:245)
        at org.apache.spark.sql.Dataset.show(Dataset.scala:640)
        at org.apache.spark.sql.Dataset.show(Dataset.scala:599)
        at org.apache.spark.sql.Test$$anonfun$28.apply$mcV$sp(MySuite.scala:384)
        at org.apache.spark.sql.Test$$anonfun$28.apply(MySuite.scala:354)
        at org.apache.spark.sql.Test$$anonfun$28.apply(MySuite.scala:354)
        at 
org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
        at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
        at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
        at org.scalatest.Transformer.apply(Transformer.scala:22)
        at org.scalatest.Transformer.apply(Transformer.scala:20)
        at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
        at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:68)
        at 
org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
        at 
org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
        at 
org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
        at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
        at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
        at 
org.apache.spark.sql.Test.org$scalatest$BeforeAndAfterEach$$super$runTest(MySuite.scala:183)
        at 
org.scalatest.BeforeAndAfterEach$class.runTest(BeforeAndAfterEach.scala:255)
        at org.apache.spark.sql.Test.runTest(MySuite.scala:183)
        at 
org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at 
org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
        at 
org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
        at 
org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
        at 
org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
        at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
        at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
        at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
        at org.scalatest.Suite$class.run(Suite.scala:1424)
        at 
org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
        at 
org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at 
org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
        at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
        at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
        at 
org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:31)
        at 
org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
        at 
org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
        at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:31)
        at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
        at 
org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
        at 
org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
        at scala.collection.immutable.List.foreach(List.scala:381)
        at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
        at 
org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
        at 
org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
        at 
org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
        at 
org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
        at org.scalatest.tools.Runner$.run(Runner.scala:883)
        at org.scalatest.tools.Runner.run(Runner.scala)
        at 
org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:138)
        at 
org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:28)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at com.intellij.rt.execution.application.AppMain.main(AppMain.java:147)
{code}
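
For reference, SPARK-19846 proposes a flag to disable constraint propagation entirely. Below is a minimal sketch of the plan shape that triggers the hang (a wide projection followed by a filter) together with that flag. Note this is not the attached TestFilter.scala, and the configuration key {{spark.sql.constraintPropagation.enabled}} is an assumption taken from the SPARK-19846 patch:

{code}
// Hypothetical reproducer sketch (not the attached TestFilter.scala):
// a wide Project of many aliased columns with a Filter on top, which is
// the shape that drives InferFiltersFromConstraints into the
// ExpressionSet growth seen in the stack trace above.
import org.apache.spark.sql.SparkSession

object WideFilterRepro {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("SPARK-19875 repro sketch")
      .master("local[*]")
      // Assumed config key from the SPARK-19846 patch; setting it to
      // false skips constraint propagation and avoids the hang.
      .config("spark.sql.constraintPropagation.enabled", "false")
      .getOrCreate()
    import spark.implicits._

    // 50 aliased columns c0..c49 derived from a single input column.
    val wide = spark.range(1000)
      .select((0 until 50).map(i => ($"id" + i).as(s"c$i")): _*)

    // The Filter above the wide Project triggers the constraint
    // inference pass during optimization.
    wide.filter($"c0" > 0).show()

    spark.stop()
  }
}
{code}

With the flag set to false, the optimizer should skip the constraint computation altogether, so the per-alias ExpressionSet unions in getAliasedConstraints never run.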

> Map->filter on many columns gets stuck in constraint inference optimization 
> code
> --------------------------------------------------------------------------------
>
>                 Key: SPARK-19875
>                 URL: https://issues.apache.org/jira/browse/SPARK-19875
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>    Affects Versions: 2.1.0
>            Reporter: Jay Pranavamurthi
>         Attachments: test10cols.csv, test50cols.csv, TestFilter.scala
>
>
> The attached code (TestFilter.scala) works with a 10-column csv dataset, but gets stuck with a 50-column csv dataset. Both datasets are attached.


