Github user rdblue commented on a diff in the pull request: https://github.com/apache/spark/pull/21977#discussion_r209771361 --- Diff: resource-managers/yarn/src/test/scala/org/apache/spark/deploy/yarn/BaseYarnClusterSuite.scala --- @@ -179,17 +185,23 @@ abstract class BaseYarnClusterSuite * the tests enforce that something is written to a file after everything is ok to indicate * that the job succeeded. */ - protected def checkResult(finalState: SparkAppHandle.State, result: File): Unit = { - checkResult(finalState, result, "success") - } - protected def checkResult( finalState: SparkAppHandle.State, result: File, - expected: String): Unit = { + expected: String = "success", + outFile: Option[File] = None): Unit = { finalState should be (SparkAppHandle.State.FINISHED) val resultString = Files.toString(result, StandardCharsets.UTF_8) - resultString should be (expected) + + // the context message is passed to assert as Any instead of a function. to lazily load the + // output from the file, this passes an anonymous object that loads it in toString when building + // an error message + val output = new Object() { + override def toString: String = outFile + .map(Files.toString(_, StandardCharsets.UTF_8)) + .getOrElse("(stdout/stderr was not captured)") + } + assert(resultString === expected, output) --- End diff -- These changes aren't required for the Python memory setting, but without them the `YarnClusterSuite` produces no helpful information for debugging what went wrong in the tests. I think it is helpful to add this as part of this PR so that the errors will be shown in future test runs.
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org