[ https://issues.apache.org/jira/browse/SPARK-9529?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Apache Spark reassigned SPARK-9529: ----------------------------------- Assignee: Apache Spark (was: Davies Liu) > Improve sort on Decimal > ----------------------- > > Key: SPARK-9529 > URL: https://issues.apache.org/jira/browse/SPARK-9529 > Project: Spark > Issue Type: Sub-task > Components: SQL > Reporter: Davies Liu > Assignee: Apache Spark > Priority: Critical > > Right now, it's really slow, just hang there in random tests > {code} > pool-1-thread-1-ScalaTest-running-TungstenSortSuite" prio=5 > tid=0x00007f822bc82800 nid=0x5103 runnable [0x000000011d1be000] > java.lang.Thread.State: RUNNABLE > at java.math.BigInteger.<init>(BigInteger.java:405) > at java.math.BigDecimal.bigTenToThe(BigDecimal.java:3380) > at java.math.BigDecimal.bigMultiplyPowerTen(BigDecimal.java:3508) > at java.math.BigDecimal.setScale(BigDecimal.java:2394) > at java.math.BigDecimal.divide(BigDecimal.java:1691) > at java.math.BigDecimal.divideToIntegralValue(BigDecimal.java:1734) > at java.math.BigDecimal.divideAndRemainder(BigDecimal.java:1891) > at java.math.BigDecimal.remainder(BigDecimal.java:1833) > at scala.math.BigDecimal.remainder(BigDecimal.scala:281) > at scala.math.BigDecimal.isWhole(BigDecimal.scala:215) > at scala.math.BigDecimal.hashCode(BigDecimal.scala:180) > at org.apache.spark.sql.types.Decimal.hashCode(Decimal.scala:260) > at > org.apache.spark.sql.catalyst.InternalRow.hashCode(InternalRow.scala:121) > at org.apache.spark.RangePartitioner.hashCode(Partitioner.scala:201) > at java.lang.Object.toString(Object.java:237) > at > java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1418) > at java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1177) > at > java.io.ObjectOutputStream.defaultWriteFields(ObjectOutputStream.java:1547) > at > java.io.ObjectOutputStream.writeSerialData(ObjectOutputStream.java:1508) > at > java.io.ObjectOutputStream.writeOrdinaryObject(ObjectOutputStream.java:1431) > at 
java.io.ObjectOutputStream.writeObject0(ObjectOutputStream.java:1177) > at java.io.ObjectOutputStream.writeObject(ObjectOutputStream.java:347) > at > org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:44) > at > org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:84) > at > org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:301) > at > org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:294) > at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:122) > at org.apache.spark.SparkContext.clean(SparkContext.scala:2003) > at > org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1.apply(RDD.scala:683) > at > org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1.apply(RDD.scala:682) > at > org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147) > at > org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108) > at org.apache.spark.rdd.RDD.withScope(RDD.scala:286) > at org.apache.spark.rdd.RDD.mapPartitions(RDD.scala:682) > at > org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:181) > at > org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:148) > at > org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48) > at org.apache.spark.sql.execution.Exchange.doExecute(Exchange.scala:148) > at > org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:113) > at > org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:113) > at > org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147) > at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:112) > at > org.apache.spark.sql.execution.Sort$$anonfun$doExecute$1.apply(sort.scala:48) > at > org.apache.spark.sql.execution.Sort$$anonfun$doExecute$1.apply(sort.scala:48) > at > 
org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48) > at org.apache.spark.sql.execution.Sort.doExecute(sort.scala:47) > at > org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:113) > at > org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:113) > at > org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147) > at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:112) > at > org.apache.spark.sql.execution.SparkPlan.executeCollect(SparkPlan.scala:127) > at > org.apache.spark.sql.execution.SparkPlanTest$.executePlan(SparkPlanTest.scala:297) > at > org.apache.spark.sql.execution.SparkPlanTest$.checkAnswer(SparkPlanTest.scala:160) > at > org.apache.spark.sql.execution.SparkPlanTest.checkThatPlansAgree(SparkPlanTest.scala:126) > at > org.apache.spark.sql.execution.TungstenSortSuite$$anonfun$3$$anonfun$apply$2$$anonfun$apply$3$$anonfun$apply$4$$anonfun$apply$1.apply$mcV$sp(TungstenSortSuite.scala:76) > at > org.apache.spark.sql.execution.TungstenSortSuite$$anonfun$3$$anonfun$apply$2$$anonfun$apply$3$$anonfun$apply$4$$anonfun$apply$1.apply(TungstenSortSuite.scala:69) > at > org.apache.spark.sql.execution.TungstenSortSuite$$anonfun$3$$anonfun$apply$2$$anonfun$apply$3$$anonfun$apply$4$$anonfun$apply$1.apply(TungstenSortSuite.scala:69) > at > org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22) > at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85) > at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104) > at org.scalatest.Transformer.apply(Transformer.scala:22) > at org.scalatest.Transformer.apply(Transformer.scala:20) > at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166) > at org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:42) > at > org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163) > at > 
org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175) > at > org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175) > at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306) > at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175) > at org.scalatest.FunSuite.runTest(FunSuite.scala:1555) > at > org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208) > at > org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208) > at > org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413) > at > org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401) > at scala.collection.immutable.List.foreach(List.scala:318) > at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401) > at > org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396) > at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483) > at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208) > at org.scalatest.FunSuite.runTests(FunSuite.scala:1555) > at org.scalatest.Suite$class.run(Suite.scala:1424) > at > org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555) > at > org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212) > at > org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212) > at org.scalatest.SuperEngine.runImpl(Engine.scala:545) > at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212) > at > org.apache.spark.sql.execution.TungstenSortSuite.org$scalatest$BeforeAndAfterAll$$super$run(TungstenSortSuite.scala:32) > at > org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257) > at > org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256) > at > org.apache.spark.sql.execution.TungstenSortSuite.run(TungstenSortSuite.scala:32) > at > 
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:462) > at > org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:671) > at sbt.ForkMain$Run$2.call(ForkMain.java:294) > at sbt.ForkMain$Run$2.call(ForkMain.java:284) > at java.util.concurrent.FutureTask.run(FutureTask.java:262) > at > java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) > at > java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) > at java.lang.Thread.run(Thread.java:745) > {code} -- This message was sent by Atlassian JIRA (v6.3.4#6332) --------------------------------------------------------------------- To unsubscribe, e-mail: issues-unsubscribe@spark.apache.org For additional commands, e-mail: issues-help@spark.apache.org