This is an automated email from the ASF dual-hosted git repository.
mbutrovich pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/datafusion-comet.git
The following commit(s) were added to refs/heads/main by this push:
new 8b041a9e5 chore: use fixed seed in RNG in tests (#2917)
8b041a9e5 is described below
commit 8b041a9e5e426855110cdf96a63df3659a5a1a91
Author: Andy Grove <[email protected]>
AuthorDate: Tue Dec 16 10:03:45 2025 -0700
chore: use fixed seed in RNG in tests (#2917)
---
.../test/scala/org/apache/comet/CometExpressionSuite.scala | 2 +-
.../scala/org/apache/comet/parquet/ParquetReadSuite.scala | 12 ++++++------
.../src/test/scala/org/apache/spark/sql/CometTestBase.scala | 8 ++++----
3 files changed, 11 insertions(+), 11 deletions(-)
diff --git a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
index c6d505691..99149d375 100644
--- a/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/CometExpressionSuite.scala
@@ -1773,7 +1773,7 @@ class CometExpressionSuite extends CometTestBase with AdaptiveSparkPlanHelper {
}
test("Decimal random number tests") {
- val rand = scala.util.Random
+ val rand = new scala.util.Random(42)
def makeNum(p: Int, s: Int): String = {
val int1 = rand.nextLong()
val int2 = rand.nextLong().abs
diff --git a/spark/src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala b/spark/src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala
index 73ddf750b..b028a70dc 100644
--- a/spark/src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala
+++ b/spark/src/test/scala/org/apache/comet/parquet/ParquetReadSuite.scala
@@ -348,7 +348,7 @@ abstract class ParquetReadSuite extends CometTestBase {
}
test("mixed nulls and non-nulls") {
- val rand = scala.util.Random
+ val rand = new scala.util.Random(42)
val data = (0 to 100).map { i =>
val row: (Boolean, Integer, java.lang.Long, java.lang.Float, java.lang.Double, String) = {
if (rand.nextBoolean()) {
@@ -403,7 +403,7 @@ abstract class ParquetReadSuite extends CometTestBase {
pageSize = pageSize,
dictionaryPageSize = pageSize)
- val rand = scala.util.Random
+ val rand = new scala.util.Random(42)
val expected = (0 until n).map { i =>
if (rand.nextBoolean()) {
None
@@ -626,7 +626,7 @@ abstract class ParquetReadSuite extends CometTestBase {
dictionaryPageSize = dictionaryPageSize,
pageRowCountLimit = pageRowCount)
- val rand = scala.util.Random
+ val rand = new scala.util.Random(42)
val expected = (0 until n).map { i =>
// use a single value for the first page, to make sure dictionary encoding kicks in
val value = if (i < pageRowCount) i % 8 else i
@@ -814,7 +814,7 @@ abstract class ParquetReadSuite extends CometTestBase {
dictionaryPageSize = pageSize,
rowGroupSize = 1024 * 128)
- val rand = scala.util.Random
+ val rand = new scala.util.Random(42)
val expected = (0 until n).map { i =>
if (rand.nextBoolean()) {
None
@@ -1564,7 +1564,7 @@ abstract class ParquetReadSuite extends CometTestBase {
pageSize = pageSize,
dictionaryPageSize = pageSize)
- val rand = scala.util.Random
+ val rand = new scala.util.Random(42)
val expected = (0 until n).map { i =>
if (rand.nextBoolean()) {
None
@@ -1662,7 +1662,7 @@ abstract class ParquetReadSuite extends CometTestBase {
dictionaryPageSize = dictionaryPageSize,
pageRowCountLimit = pageRowCount)
- val rand = scala.util.Random
+ val rand = new scala.util.Random(42)
val expected = (0 until n).map { i =>
// use a single value for the first page, to make sure dictionary encoding kicks in
val value = if (i < pageRowCount) i % 8 else i
diff --git a/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala
b/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala
index 54854cb83..bc9e521d3 100644
--- a/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala
+++ b/spark/src/test/scala/org/apache/spark/sql/CometTestBase.scala
@@ -694,7 +694,7 @@ abstract class CometTestBase
val idGenerator = new AtomicInteger(0)
- val rand = scala.util.Random
+ val rand = new scala.util.Random(42)
val data = (begin until end).map { i =>
if (nullEnabled && rand.nextBoolean()) {
None
@@ -788,7 +788,7 @@ abstract class CometTestBase
rowGroupSize = rowGroupSize)
val div = if (dictionaryEnabled) 10 else n // maps value to a small range for dict to kick in
- val rand = scala.util.Random
+ val rand = new scala.util.Random(42)
val expected = (0 until n).map { i =>
if (rand.nextBoolean()) {
None
@@ -842,7 +842,7 @@ abstract class CometTestBase
rowGroupSize = rowGroupSize)
val div = if (dictionaryEnabled) 10 else n // maps value to a small range for dict to kick in
- val rand = scala.util.Random
+ val rand = new scala.util.Random(42)
val expected = (0 until n).map { i =>
if (rand.nextBoolean()) {
None
@@ -1240,7 +1240,7 @@ abstract class CometTestBase
val schema = MessageTypeParser.parseMessageType(schemaStr)
val writer = createParquetWriter(schema, path, dictionaryEnabled = true)
- val rand = scala.util.Random
+ val rand = new scala.util.Random(42)
val expected = (0 until total).map { i =>
// use a single value for the first page, to make sure dictionary encoding kicks in
if (rand.nextBoolean()) None
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]