srielau commented on code in PR #36693: URL: https://github.com/apache/spark/pull/36693#discussion_r891355010
########## core/src/test/scala/org/apache/spark/SparkFunSuite.scala: ##########
@@ -264,6 +264,87 @@ abstract class SparkFunSuite
     }
   }
 
+  /**
+   * Checks an exception with an error class against expected results.
+   * @param exception The exception to check
+   * @param errorClass The expected error class identifying the error
+   * @param errorSubClass Optional the expected subclass, None if not given
+   * @param sqlState Optional the expected SQLSTATE, not verified if not supplied
+   * @param parameters A map of parameter names and values. The names are as defined
+   *                   in the error-classes file.
+   */
+  protected def checkError(
+      exception: Exception with SparkThrowable,
+      errorClass: String,
+      errorSubClass: Option[String],
+      sqlState: Option[String],
+      parameters: Map[String, String],
+      matchPVals: Boolean = false): Unit = {
+    assert(exception.getErrorClass === errorClass)
+    if (exception.getErrorSubClass != null) { assert(errorSubClass.isDefined) }
+    errorSubClass.foreach(subClass => assert(exception.getErrorSubClass === subClass))
+    sqlState.foreach(state => assert(exception.getSqlState === state))
+    val expectedParameters = (exception.getParameterNames zip exception.getMessageParameters).toMap
+    if (matchPVals == true) {
+      assert(expectedParameters.size === parameters.size)
+      expectedParameters.foreach(
+        exp => {
+          val parm = parameters.getOrElse(exp._1,
+            throw new IllegalArgumentException("Missing parameter" + exp._1))
+          if (!exp._2.matches(parm)) {
+            throw new IllegalArgumentException("(" + exp._1 + ", " + exp._2 +
+              ") does not match: " + parm)
+          }
+        }
+      )
+    } else {
+      assert(expectedParameters === parameters)
+    }
+  }
+
+  protected def checkError(
+      exception: Exception with SparkThrowable,
+      errorClass: String,
+      errorSubClass: String,
+      sqlState: String,
+      parameters: Map[String, String]): Unit =
+    checkError(exception, errorClass, Some(errorSubClass), Some(sqlState), parameters)
+
+  protected def checkError(
+      exception: Exception with SparkThrowable,
+      errorClass: String,
+      sqlState: String,
+      parameters: Map[String, String]): Unit =
+    checkError(exception, errorClass, None, Some(sqlState), parameters)
+
+  protected def checkError(
+      exception: Exception with SparkThrowable,
+      errorClass: String,
+      parameters: Map[String, String]): Unit =
+    checkError(exception, errorClass, None, None, parameters)
+
+  /**
+   * Checks an exception with an error class against expected results.
+   * @param exception The exception to check
+   * @param errorClass The expected error class identifying the error
+   * @param sqlState Optional the expected SQLSTATE, not verified if not supplied
+   * @param parameters An array of values. This does not verify the right name association.
+   */
+  protected def checkError(
+      exception: Exception with SparkThrowable,
+      errorClass: String,
+      sqlState: String,
+      parameters: Array[String]): Unit =

Review Comment:
   I haven't thought about it either way. Do you want me to reduce the number of overloads?
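
For context, a sketch of what "reducing the number of overloads" could look like, not part of the diff above: a single `checkError` with Scala default arguments inside `SparkFunSuite`. It assumes the same `SparkThrowable` accessors used in the diff (`getErrorClass`, `getErrorSubClass`, `getSqlState`, `getParameterNames`, `getMessageParameters`) and the ScalaTest `assert`/`fail`/`===` helpers already available in the suite; the error class and parameter names in the usage comment are made up for illustration.

```scala
// Sketch only: a drop-in alternative inside SparkFunSuite that replaces the
// Option-based method and its convenience overloads with default arguments.
protected def checkError(
    exception: Exception with SparkThrowable,
    errorClass: String,
    errorSubClass: Option[String] = None,
    sqlState: Option[String] = None,
    parameters: Map[String, String] = Map.empty,
    matchPVals: Boolean = false): Unit = {
  assert(exception.getErrorClass === errorClass)
  // If the exception carries a subclass, the caller must have specified one.
  if (exception.getErrorSubClass != null) { assert(errorSubClass.isDefined) }
  errorSubClass.foreach(subClass => assert(exception.getErrorSubClass === subClass))
  sqlState.foreach(state => assert(exception.getSqlState === state))
  // The exception reports its actual parameters; compare them to the expected map.
  val actualParameters =
    (exception.getParameterNames zip exception.getMessageParameters).toMap
  if (matchPVals) {
    // Expected values are treated as regexes and matched against the actual values.
    assert(actualParameters.size === parameters.size)
    actualParameters.foreach { case (name, actual) =>
      val expected = parameters.getOrElse(name, fail(s"Missing parameter: $name"))
      assert(actual.matches(expected), s"($name, $actual) does not match: $expected")
    }
  } else {
    assert(actualParameters === parameters)
  }
}

// Call sites would then name only what they need, e.g. (hypothetical values):
//   checkError(e, "SOME_ERROR_CLASS", sqlState = Some("42000"),
//     parameters = Map("objectName" -> "t"))
```

Since Scala does not allow several overloads of the same method to carry default arguments, collapsing to one signature like this would also remove the ambiguity question the overload set raises.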