[ https://issues.apache.org/jira/browse/SPARK-38719?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17646963#comment-17646963 ]
Jayadeep Jayaraman edited comment on SPARK-38719 at 12/14/22 6:56 AM: ---------------------------------------------------------------------- [~maxgekk] - I tried creating the failure as shown below, but somehow this specific error does not show up. Can you suggest what can be the issue ? {code:java} // scala> val null_data = Seq( | (1, ("ABC",null,"value12")), | (2, ("MNO",null,"value22")), | (3, ("PQR",null,"value32")) | ) null_data: Seq[(Int, (String, Null, String))] = List((1,(ABC,null,value12)), (2,(MNO,null,value22)), (3,(PQR,null,value32)))scala> val df = null_data.toDF() df: org.apache.spark.sql.DataFrame = [_1: int, _2: struct<_1: string, _2: null ... 1 more field>]scala> val null_data = Seq( | (1, ("ABC",null)), | (2, ("MNO",null)), | (3, ("PQR",null)) | ) null_data: Seq[(Int, (String, Null))] = List((1,(ABC,null)), (2,(MNO,null)), (3,(PQR,null)))scala> scala> val df = null_data.toDF() df: org.apache.spark.sql.DataFrame = [_1: int, _2: struct<_1: string, _2: null>]scala> df.printSchema() root |-- _1: integer (nullable = false) |-- _2: struct (nullable = true) | |-- _1: string (nullable = true) | |-- _2: null (nullable = true) scala> df.withColumn("_2._2",col("_2._2").cast(IntegerType)).show() +---+-----------+-----+ | _1| _2|_2._2| +---+-----------+-----+ | 1|{ABC, null}| null| | 2|{MNO, null}| null| | 3|{PQR, null}| null| +---+-----------+-----+{code} was (Author: jjayadeep): I tried creating the failure as shown below, but somehow this specific error does not show up. Can you suggest what can be the issue ? {code:java} // scala> val null_data = Seq( | (1, ("ABC",null,"value12")), | (2, ("MNO",null,"value22")), | (3, ("PQR",null,"value32")) | ) null_data: Seq[(Int, (String, Null, String))] = List((1,(ABC,null,value12)), (2,(MNO,null,value22)), (3,(PQR,null,value32)))scala> val df = null_data.toDF() df: org.apache.spark.sql.DataFrame = [_1: int, _2: struct<_1: string, _2: null ... 
1 more field>]scala> val null_data = Seq( | (1, ("ABC",null)), | (2, ("MNO",null)), | (3, ("PQR",null)) | ) null_data: Seq[(Int, (String, Null))] = List((1,(ABC,null)), (2,(MNO,null)), (3,(PQR,null)))scala> scala> val df = null_data.toDF() df: org.apache.spark.sql.DataFrame = [_1: int, _2: struct<_1: string, _2: null>]scala> df.printSchema() root |-- _1: integer (nullable = false) |-- _2: struct (nullable = true) | |-- _1: string (nullable = true) | |-- _2: null (nullable = true) scala> df.withColumn("_2._2",col("_2._2").cast(IntegerType)).show() +---+-----------+-----+ | _1| _2|_2._2| +---+-----------+-----+ | 1|{ABC, null}| null| | 2|{MNO, null}| null| | 3|{PQR, null}| null| +---+-----------+-----+{code} > Test the error class: CANNOT_CAST_DATATYPE > ------------------------------------------ > > Key: SPARK-38719 > URL: https://issues.apache.org/jira/browse/SPARK-38719 > Project: Spark > Issue Type: Sub-task > Components: SQL > Affects Versions: 3.4.0 > Reporter: Max Gekk > Priority: Minor > Labels: starter > > Add at least one test for the error class *CANNOT_CAST_DATATYPE* to > QueryExecutionErrorsSuite. 
The test should cover the exception thrown in > QueryExecutionErrors: > {code:scala} > def cannotCastFromNullTypeError(to: DataType): Throwable = { > new SparkException(errorClass = "CANNOT_CAST_DATATYPE", > messageParameters = Array(NullType.typeName, to.typeName), null) > } > {code} > For example, here is a test for the error class *UNSUPPORTED_FEATURE*: > https://github.com/apache/spark/blob/34e3029a43d2a8241f70f2343be8285cb7f231b9/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala#L151-L170 > +The test must have a check of:+ > # the entire error message > # sqlState if it is defined in the error-classes.json file > # the error class -- This message was sent by Atlassian Jira (v8.20.10#820010) --------------------------------------------------------------------- To unsubscribe, e-mail: issues-unsubscr...@spark.apache.org For additional commands, e-mail: issues-h...@spark.apache.org