[ https://issues.apache.org/jira/browse/SPARK-17109?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Yin Huai updated SPARK-17109:
-----------------------------
    Issue Type: Bug  (was: Test)

> When we serialize UserDefinedGenerator to json, scala reflection throws an error
> --------------------------------------------------------------------------------
>
>                 Key: SPARK-17109
>                 URL: https://issues.apache.org/jira/browse/SPARK-17109
>             Project: Spark
>          Issue Type: Bug
>          Components: SQL
>            Reporter: Wenchen Fan
>
> {code}
> [info] - simple explode *** FAILED *** (19 milliseconds)
> [info]   Failed to parse logical plan to JSON:
> [info]   Project [word#128]
> [info]   +- Generate UserDefinedGenerator(words#126), true, false, None, [word#128]
> [info]      +- Project [_1#125 AS words#126]
> [info]         +- LocalRelation [_1#125], [[a b c],[d e]] (QueryTest.scala:214)
> [info]   org.scalatest.exceptions.TestFailedException:
> [info]   at org.scalatest.Assertions$class.newAssertionFailedException(Assertions.scala:496)
> [info]   at org.scalatest.FunSuite.newAssertionFailedException(FunSuite.scala:1555)
> [info]   at org.scalatest.Assertions$class.fail(Assertions.scala:1348)
> [info]   at org.scalatest.FunSuite.fail(FunSuite.scala:1555)
> [info]   at org.apache.spark.sql.QueryTest.checkJsonFormat(QueryTest.scala:214)
> [info]   at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:137)
> [info]   at org.apache.spark.sql.DataFrameSuite$$anonfun$10.apply$mcV$sp(DataFrameSuite.scala:122)
> [info]   at org.apache.spark.sql.DataFrameSuite$$anonfun$10.apply(DataFrameSuite.scala:119)
> [info]   at org.apache.spark.sql.DataFrameSuite$$anonfun$10.apply(DataFrameSuite.scala:119)
> [info]   at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
> [info]   at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
> [info]   at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
> [info]   at org.scalatest.Transformer.apply(Transformer.scala:22)
> [info]   at org.scalatest.Transformer.apply(Transformer.scala:20)
> [info]   at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
> [info]   at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:42)
> [info]   at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
> [info]   at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
> [info]   at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
> [info]   at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
> [info]   at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
> [info]   at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
> [info]   at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
> [info]   at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
> [info]   at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
> [info]   at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
> [info]   at scala.collection.immutable.List.foreach(List.scala:318)
> [info]   at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
> [info]   at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
> [info]   at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
> [info]   at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
> [info]   at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
> [info]   at org.scalatest.Suite$class.run(Suite.scala:1424)
> [info]   at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
> [info]   at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
> [info]   at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
> [info]   at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
> [info]   at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
> [info]   at org.apache.spark.sql.DataFrameSuite.org$scalatest$BeforeAndAfterAll$$super$run(DataFrameSuite.scala:36)
> [info]   at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
> [info]   at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
> [info]   at org.apache.spark.sql.DataFrameSuite.run(DataFrameSuite.scala:36)
> [info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:462)
> [info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:671)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:294)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:284)
> [info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> [info]   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
> [info]   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
> [info]   at java.lang.Thread.run(Thread.java:745)
> [info] Cause: java.lang.AssertionError: assertion failed: Unsound substitution from List(type T1, type T2, type T3) to List()
> [info]   at scala.reflect.internal.Types$SubstMap.<init>(Types.scala:4644)
> [info]   at scala.reflect.internal.Types$SubstTypeMap.<init>(Types.scala:4761)
> [info]   at scala.reflect.internal.Types$Type.subst(Types.scala:796)
> [info]   at scala.reflect.internal.Types$TypeApiImpl.substituteTypes(Types.scala:321)
> [info]   at scala.reflect.internal.Types$TypeApiImpl.substituteTypes(Types.scala:298)
> [info]   at org.apache.spark.sql.catalyst.ScalaReflection$$anonfun$getConstructorParameters$1.apply(ScalaReflection.scala:769)
> [info]   at org.apache.spark.sql.catalyst.ScalaReflection$$anonfun$getConstructorParameters$1.apply(ScalaReflection.scala:768)
> [info]   at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
> [info]   at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
> [info]   at scala.collection.immutable.List.foreach(List.scala:318)
> [info]   at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
> [info]   at scala.collection.AbstractTraversable.map(Traversable.scala:105)
> [info]   at org.apache.spark.sql.catalyst.ScalaReflection$class.getConstructorParameters(ScalaReflection.scala:768)
> [info]   at org.apache.spark.sql.catalyst.ScalaReflection$.getConstructorParameters(ScalaReflection.scala:30)
> [info]   at org.apache.spark.sql.catalyst.ScalaReflection$.getConstructorParameters(ScalaReflection.scala:610)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode.org$apache$spark$sql$catalyst$trees$TreeNode$$parseToJson(TreeNode.scala:550)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$org$apache$spark$sql$catalyst$trees$TreeNode$$parseToJson$7.apply(TreeNode.scala:539)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$org$apache$spark$sql$catalyst$trees$TreeNode$$parseToJson$7.apply(TreeNode.scala:539)
> [info]   at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
> [info]   at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
> [info]   at scala.collection.immutable.List.foreach(List.scala:318)
> [info]   at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
> [info]   at scala.collection.AbstractTraversable.map(Traversable.scala:105)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode.org$apache$spark$sql$catalyst$trees$TreeNode$$parseToJson(TreeNode.scala:539)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$jsonFields$2.apply(TreeNode.scala:516)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$jsonFields$2.apply(TreeNode.scala:507)
> [info]   at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
> [info]   at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
> [info]   at scala.collection.immutable.List.foreach(List.scala:318)
> [info]   at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
> [info]   at scala.collection.AbstractTraversable.map(Traversable.scala:105)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode.jsonFields(TreeNode.scala:507)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode.org$apache$spark$sql$catalyst$trees$TreeNode$$collectJsonValue$1(TreeNode.scala:492)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode.jsonValue(TreeNode.scala:497)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode.org$apache$spark$sql$catalyst$trees$TreeNode$$parseToJson(TreeNode.scala:537)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$jsonFields$2.apply(TreeNode.scala:516)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$jsonFields$2.apply(TreeNode.scala:507)
> [info]   at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
> [info]   at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
> [info]   at scala.collection.immutable.List.foreach(List.scala:318)
> [info]   at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
> [info]   at scala.collection.AbstractTraversable.map(Traversable.scala:105)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode.jsonFields(TreeNode.scala:507)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode.org$apache$spark$sql$catalyst$trees$TreeNode$$collectJsonValue$1(TreeNode.scala:492)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$org$apache$spark$sql$catalyst$trees$TreeNode$$collectJsonValue$1$1.apply(TreeNode.scala:494)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$org$apache$spark$sql$catalyst$trees$TreeNode$$collectJsonValue$1$1.apply(TreeNode.scala:494)
> [info]   at scala.collection.immutable.List.foreach(List.scala:318)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode.org$apache$spark$sql$catalyst$trees$TreeNode$$collectJsonValue$1(TreeNode.scala:494)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode.jsonValue(TreeNode.scala:497)
> [info]   at org.apache.spark.sql.catalyst.trees.TreeNode.toJSON(TreeNode.scala:483)
> [info]   at org.apache.spark.sql.QueryTest.checkJsonFormat(QueryTest.scala:211)
> [info]   at org.apache.spark.sql.QueryTest.checkAnswer(QueryTest.scala:137)
> [info]   at org.apache.spark.sql.DataFrameSuite$$anonfun$10.apply$mcV$sp(DataFrameSuite.scala:122)
> [info]   at org.apache.spark.sql.DataFrameSuite$$anonfun$10.apply(DataFrameSuite.scala:119)
> [info]   at org.apache.spark.sql.DataFrameSuite$$anonfun$10.apply(DataFrameSuite.scala:119)
> [info]   at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
> [info]   at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
> [info]   at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
> [info]   at org.scalatest.Transformer.apply(Transformer.scala:22)
> [info]   at org.scalatest.Transformer.apply(Transformer.scala:20)
> [info]   at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
> [info]   at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:42)
> [info]   at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
> [info]   at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
> [info]   at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
> [info]   at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
> [info]   at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
> [info]   at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
> [info]   at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
> [info]   at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
> [info]   at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
> [info]   at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
> [info]   at scala.collection.immutable.List.foreach(List.scala:318)
> [info]   at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
> [info]   at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
> [info]   at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
> [info]   at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
> [info]   at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
> [info]   at org.scalatest.Suite$class.run(Suite.scala:1424)
> [info]   at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
> [info]   at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
> [info]   at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
> [info]   at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
> [info]   at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
> [info]   at org.apache.spark.sql.DataFrameSuite.org$scalatest$BeforeAndAfterAll$$super$run(DataFrameSuite.scala:36)
> [info]   at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
> [info]   at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
> [info]   at org.apache.spark.sql.DataFrameSuite.run(DataFrameSuite.scala:36)
> [info]   at org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:462)
> [info]   at org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:671)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:294)
> [info]   at sbt.ForkMain$Run$2.call(ForkMain.java:284)
> [info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> [info]   at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
> [info]   at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
> [info]   at java.lang.Thread.run(Thread.java:745)
> {code}
> The stacktrace above is from running `DataFrameSuite."simple explode"` on branch 1.6.
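The failing plan (a Project over a Generate of UserDefinedGenerator over a two-row LocalRelation) is the shape produced by the Spark 1.6 `DataFrame.explode` API, so the bug can likely be reproduced outside the test suite. Below is a minimal sketch, assuming a local Spark 1.6 SQLContext; the names `sc`, `sqlContext`, `df`, and `exploded` are illustrative and not taken from the original test.

{code}
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// Local Spark 1.6 context; names are illustrative.
val sc = new SparkContext(new SparkConf().setMaster("local[2]").setAppName("SPARK-17109-repro"))
val sqlContext = new SQLContext(sc)
import sqlContext.implicits._

// Same data shape as the failing test: one string column named "words".
val df = Seq(Tuple1("a b c"), Tuple1("d e")).toDF("words")

// DataFrame.explode (Spark 1.6 API) plans a Generate over a UserDefinedGenerator.
val exploded = df.explode("words", "word") { words: String => words.split(" ").toSeq }
  .select("word")

// Serializing the analyzed plan to JSON walks UserDefinedGenerator's constructor
// parameters through ScalaReflection.getConstructorParameters, which is where the
// "Unsound substitution" assertion in the stacktrace above is thrown.
exploded.queryExecution.analyzed.toJSON
{code}

The original failure goes through `QueryTest.checkAnswer`, which calls `checkJsonFormat` and then `TreeNode.toJSON` on the plan, so calling `toJSON` directly as above should exercise the same ScalaReflection code path.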