peterxcli commented on code in PR #3369:
URL: https://github.com/apache/datafusion-comet/pull/3369#discussion_r2759771938


##########
spark/src/test/scala/org/apache/comet/CometMapExpressionSuite.scala:
##########
@@ -157,4 +157,48 @@ class CometMapExpressionSuite extends CometTestBase {
     }
   }
 
+  test("map_contains_key") {
+    withTempDir { dir =>
+      val path = new Path(dir.toURI.toString, "test.parquet")
+      val filename = path.toString
+      val random = new Random(42)
+      withSQLConf(CometConf.COMET_ENABLED.key -> "false") {
+        val schemaGenOptions =
+          SchemaGenOptions(generateArray = true, generateStruct = false, 
generateMap = true)
+        val dataGenOptions = DataGenOptions(allowNull = false, 
generateNegativeZero = false)
+        ParquetGenerator.makeParquetFile(
+          random,
+          spark,
+          filename,
+          100,
+          schemaGenOptions,
+          dataGenOptions)
+      }
+      spark.read.parquet(filename).createOrReplaceTempView("t1")
+
+      checkSparkAnswer(
+        spark.sql("SELECT map_contains_key(c14, element_at(map_keys(c14), 1)) 
FROM t1"))
+      checkSparkAnswer(spark.sql("SELECT map_contains_key(c14, 999999) FROM 
t1"))
+
+      checkAnswer(
+        spark.sql("SELECT map_contains_key(c14, element_at(map_keys(c14), 1)) 
FROM t1 LIMIT 1"),
+        Row(true))
+      checkAnswer(spark.sql("SELECT map_contains_key(c14, 999999) FROM t1 
LIMIT 1"), Row(false))
+
+      // Empty map
+      checkSparkAnswerAndOperator(spark.sql("""SELECT map_contains_key(
+          |  map_from_arrays(CAST(array() AS array<string>), CAST(array() AS 
array<int>)),
+          |  'any_key'
+          |) FROM t1 LIMIT 1""".stripMargin))
+
+      // Empty map with int keys
+      checkSparkAnswerAndOperator(spark.sql(
+        "SELECT map_contains_key(map_from_arrays(CAST(array() AS array<int>), 
CAST(array() AS array<string>)), 0) FROM t1"))
+
+      // Empty map with string keys
+      checkSparkAnswerAndOperator(spark.sql(
+        "SELECT map_contains_key(map_from_arrays(CAST(array() AS 
array<string>), CAST(array() AS array<double>)), 'key') FROM t1"))

Review Comment:
   I'm trying my best to replicate the Scala test suite in Spark:
   - 
[apache/spark@04b821c/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala#L609-L618](https://github.com/apache/spark/blob/04b821c69e85be5f51a1270b3a9a4155afdb5334/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala#L609-L618)
   - 
[apache/spark@04b821c/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala#L1663-L1672](https://github.com/apache/spark/blob/04b821c69e85be5f51a1270b3a9a4155afdb5334/sql/core/src/test/scala/org/apache/spark/sql/ParametersSuite.scala#L1663-L1672)
   
   but I noticed they also have some SQL tests for this function; let me merge 
all of them into the SLT file



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to