Github user jerryshao commented on a diff in the pull request: https://github.com/apache/spark/pull/2514#discussion_r18012867 --- Diff: core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala --- @@ -707,4 +707,53 @@ class ExternalSorterSuite extends FunSuite with LocalSparkContext with PrivateMe Some(agg), Some(new HashPartitioner(FEW_PARTITIONS)), None, None) assertDidNotBypassMergeSort(sorter4) } + + test("sort without breaking sorting contracts") { + val conf = createSparkConf(true) + conf.set("spark.shuffle.memoryFraction", "0.001") + conf.set("spark.shuffle.manager", "sort") + sc = new SparkContext("local-cluster[1,1,512]", "test", conf) + + val testData = Array[String]( + "hierarch", // -1732884796 + "variants", // -1249574770 + "inwork", // -1183663690 + "isohel", // -1179291542 + "misused" // 1069518484 + ) + val expected = testData.map(s => (s, 200000)) + + def createCombiner(i: Int) = ArrayBuffer(i) + def mergeValue(c: ArrayBuffer[Int], i: Int) = c += i + def mergeCombiners(c1: ArrayBuffer[Int], c2: ArrayBuffer[Int]) = c1 ++= c2 + + val agg = new Aggregator[String, Int, ArrayBuffer[Int]]( + createCombiner, mergeValue, mergeCombiners) + + // Using wrongHashOrdering to show that integer overflow will lead to wrong sort result. + val wrongHashOrdering = new Ordering[String] { + override def compare(a: String, b: String) = { + val h1 = a.hashCode() + val h2 = b.hashCode() + h1 - h2 + } + } + val sorter1 = new ExternalSorter[String, Int, ArrayBuffer[Int]]( + None, None, Some(wrongHashOrdering), None) + sorter1.insertAll(expected.iterator) --- End diff -- OK, thanks for your advice, will do it.
--- If your project is set up for it, you can reply to this email and have your reply appear on GitHub as well. If your project does not have this feature enabled and wishes so, or if the feature is enabled but not working, please contact infrastructure at infrastructure@apache.org or file a JIRA ticket with INFRA. --- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org