amandeep-sharma commented on a change in pull request #31545: URL: https://github.com/apache/spark/pull/31545#discussion_r574257299
########## File path: sql/core/src/test/scala/org/apache/spark/sql/DataFrameNaFunctionsSuite.scala ##########
@@ -460,4 +460,12 @@ class DataFrameNaFunctionsSuite extends QueryTest with SharedSparkSession {
       Row(0, 0L, 0.toShort, 0.toByte, Float.NaN, Double.NaN) ::
       Row(0, 0L, 0.toShort, 0.toByte, Float.NaN, Double.NaN) :: Nil)
   }
+
+  test("SPARK-34417 - test fillMap() for column with a dot in the name") {
+    val na = "n/a"
+    checkAnswer(
+      Seq(("abc", 23L), ("def", 44L), (null, 0L)).toDF("ColWith.Dot", "Col")
+        .na.fill(Map("`ColWith.Dot`" -> na)),

Review comment:
   @imback82 I think your point is that if the data frame has a column with a dot in its name, but that column is not part of the null fill map, the fill will still fail. Yes, it will fail; fixing that will need a change in the catalyst layer. Here is a reproduction:
```scala
import org.apache.spark.sql.SparkSession

object ColumnNameWithDot {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder.appName("Simple Application")
      .config("spark.master", "local").getOrCreate()
    spark.sparkContext.setLogLevel("OFF")

    import spark.implicits._
    // "Col.2" also contains a dot but is NOT in the fill map below,
    // which is the case that still fails.
    val df = Seq(("abc", 23), ("def", 44), (null, 0)).toDF("ColWith.Dot", "Col.2")
    df.na.fill(Map("`ColWith.Dot`" -> "na"))
      .show()
  }
}
```
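As a side note, until a catalyst-layer fix lands, one possible workaround is to rename the dotted columns to dot-free placeholders before the fill and restore them afterwards. A minimal sketch of that idea, assuming the placeholder names `ColWith_Dot` and `Col_2` (both purely illustrative):

```scala
import org.apache.spark.sql.SparkSession

object ColumnNameWithDotWorkaround {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder.appName("Simple Application")
      .config("spark.master", "local").getOrCreate()
    import spark.implicits._

    val df = Seq(("abc", 23), ("def", 44), (null, 0)).toDF("ColWith.Dot", "Col.2")

    // Rename the dotted columns to dot-free placeholders (illustrative names),
    // run the fill against the placeholder, then restore the original names.
    df.withColumnRenamed("ColWith.Dot", "ColWith_Dot")
      .withColumnRenamed("Col.2", "Col_2")
      .na.fill(Map("ColWith_Dot" -> "na"))
      .withColumnRenamed("ColWith_Dot", "ColWith.Dot")
      .withColumnRenamed("Col_2", "Col.2")
      .show()
  }
}
```

This sidesteps the problem because the fill map then only ever sees column names without dots, so the resolution issue in the catalyst layer is never triggered.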