[ 
https://issues.apache.org/jira/browse/SPARK-38736?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Max Gekk updated SPARK-38736:
-----------------------------
    Description: 
Add tests for the error classes *INVALID_ARRAY_INDEX* and *INVALID_ARRAY_INDEX_IN_ELEMENT_AT* to 
QueryExecutionErrorsSuite. The tests should cover the exceptions thrown in 
QueryExecutionErrors:

{code:scala}
  private def invalidArrayIndexErrorInternal(
      index: Int,
      numElements: Int,
      key: String): ArrayIndexOutOfBoundsException = {
    new SparkArrayIndexOutOfBoundsException(errorClass = "INVALID_ARRAY_INDEX",
      messageParameters = Array(index.toString, numElements.toString, key))
  }

  def invalidElementAtIndexError(
       index: Int,
       numElements: Int): ArrayIndexOutOfBoundsException = {
    new SparkArrayIndexOutOfBoundsException(errorClass = "INVALID_ARRAY_INDEX_IN_ELEMENT_AT",
      messageParameters = Array(index.toString, numElements.toString, SQLConf.ANSI_ENABLED.key))
  }
{code}

For example, here is a test for the error class *UNSUPPORTED_FEATURE*: 
https://github.com/apache/spark/blob/34e3029a43d2a8241f70f2343be8285cb7f231b9/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala#L151-L170

+The test must check:+
# the entire error message
# sqlState if it is defined in the error-classes.json file
# the error class
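
Below is a minimal sketch of what such a test could look like in QueryExecutionErrorsSuite. It is only an illustration: the SQL statement, the expected message text, and the sqlState check are assumptions; take the exact values from error-classes.json and mirror the linked UNSUPPORTED_FEATURE test.

{code:scala}
  test("INVALID_ARRAY_INDEX: accessing an array element by an out-of-range index") {
    // Assumption: under ANSI mode, an out-of-bounds array access raises the
    // INVALID_ARRAY_INDEX error class via invalidArrayIndexErrorInternal.
    withSQLConf(SQLConf.ANSI_ENABLED.key -> "true") {
      val e = intercept[SparkArrayIndexOutOfBoundsException] {
        sql("select array(1, 2, 3)[5]").collect()
      }
      // 1. the error class
      assert(e.getErrorClass === "INVALID_ARRAY_INDEX")
      // 2. sqlState, if it is defined for this class in error-classes.json
      //    (assumption: no sqlState is defined, hence null)
      assert(e.getSqlState === null)
      // 3. the entire error message (the expected text below is illustrative;
      //    copy the real template from error-classes.json)
      assert(e.getMessage ===
        "The index 5 is out of bounds. The array has 3 elements. " +
          s"If necessary set ${SQLConf.ANSI_ENABLED.key} to false to bypass this error.")
    }
  }
{code}

A similar test using element_at() should cover *INVALID_ARRAY_INDEX_IN_ELEMENT_AT*, which is thrown by invalidElementAtIndexError.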

  was:
Add tests for the error class *INTERNAL_ERROR* to QueryExecutionErrorsSuite. 
The tests should cover the exceptions thrown in QueryExecutionErrors:

{code:scala}
  def logicalHintOperatorNotRemovedDuringAnalysisError(): Throwable = {
    new SparkIllegalStateException(errorClass = "INTERNAL_ERROR",
      messageParameters = Array(
        "Internal error: logical hint operator should have been removed during analysis"))
  }

  def cannotEvaluateExpressionError(expression: Expression): Throwable = {
    new SparkUnsupportedOperationException(errorClass = "INTERNAL_ERROR",
      messageParameters = Array(s"Cannot evaluate expression: $expression"))
  }

  def cannotGenerateCodeForExpressionError(expression: Expression): Throwable = {
    new SparkUnsupportedOperationException(errorClass = "INTERNAL_ERROR",
      messageParameters = Array(s"Cannot generate code for expression: $expression"))
  }

  def cannotTerminateGeneratorError(generator: UnresolvedGenerator): Throwable = {
    new SparkUnsupportedOperationException(errorClass = "INTERNAL_ERROR",
      messageParameters = Array(s"Cannot terminate expression: $generator"))
  }

  def methodNotDeclaredError(name: String): Throwable = {
    new SparkNoSuchMethodException(errorClass = "INTERNAL_ERROR",
      messageParameters = Array(
        s"""A method named "$name" is not declared in any enclosing class nor any supertype"""))
  }
{code}

For example, here is a test for the error class *UNSUPPORTED_FEATURE*: 
https://github.com/apache/spark/blob/34e3029a43d2a8241f70f2343be8285cb7f231b9/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala#L151-L170

+The test must check:+
# the entire error message
# sqlState if it is defined in the error-classes.json file
# the error class


> Test the error classes: INVALID_ARRAY_INDEX*
> --------------------------------------------
>
>                 Key: SPARK-38736
>                 URL: https://issues.apache.org/jira/browse/SPARK-38736
>             Project: Spark
>          Issue Type: Sub-task
>          Components: SQL
>    Affects Versions: 3.4.0
>            Reporter: Max Gekk
>            Priority: Minor
>              Labels: starter
>
> Add tests for the error classes *INVALID_ARRAY_INDEX* and *INVALID_ARRAY_INDEX_IN_ELEMENT_AT* to 
> QueryExecutionErrorsSuite. The tests should cover the exceptions thrown in 
> QueryExecutionErrors:
> {code:scala}
>   private def invalidArrayIndexErrorInternal(
>       index: Int,
>       numElements: Int,
>       key: String): ArrayIndexOutOfBoundsException = {
>     new SparkArrayIndexOutOfBoundsException(errorClass = "INVALID_ARRAY_INDEX",
>       messageParameters = Array(index.toString, numElements.toString, key))
>   }
>
>   def invalidElementAtIndexError(
>        index: Int,
>        numElements: Int): ArrayIndexOutOfBoundsException = {
>     new SparkArrayIndexOutOfBoundsException(errorClass = "INVALID_ARRAY_INDEX_IN_ELEMENT_AT",
>       messageParameters = Array(index.toString, numElements.toString, SQLConf.ANSI_ENABLED.key))
>   }
> {code}
> For example, here is a test for the error class *UNSUPPORTED_FEATURE*: 
> https://github.com/apache/spark/blob/34e3029a43d2a8241f70f2343be8285cb7f231b9/sql/core/src/test/scala/org/apache/spark/sql/errors/QueryCompilationErrorsSuite.scala#L151-L170
> +The test must check:+
> # the entire error message
> # sqlState if it is defined in the error-classes.json file
> # the error class


