GitHub user srowen commented on a diff in the pull request: https://github.com/apache/spark/pull/20761#discussion_r218476955 --- Diff: resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ResourceTypeValidator.scala --- @@ -0,0 +1,107 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.deploy.yarn + +import scala.collection.mutable + +import org.apache.spark.{SparkConf, SparkException} +import org.apache.spark.deploy.yarn.config._ + +private object ResourceTypeValidator { + private val ERROR_PREFIX: String = "Error: " + private val POSSIBLE_RESOURCE_DEFINITIONS = Seq[(String, String)]( + ("spark.yarn.am.memory", YARN_AM_RESOURCE_TYPES_PREFIX + "memory"), + ("spark.yarn.am.cores", YARN_AM_RESOURCE_TYPES_PREFIX + "cores"), + ("spark.driver.memory", YARN_DRIVER_RESOURCE_TYPES_PREFIX + "memory"), + ("spark.driver.cores", YARN_DRIVER_RESOURCE_TYPES_PREFIX + "cores"), + ("spark.executor.memory", YARN_EXECUTOR_RESOURCE_TYPES_PREFIX + "memory"), + ("spark.executor.cores", YARN_EXECUTOR_RESOURCE_TYPES_PREFIX + "cores")) + + /** + * Validates sparkConf and throws a SparkException if a standard resource (memory or cores) + * is defined with the property spark.yarn.x.resource.y<br> + * + * Example of an invalid config:<br> + * - spark.yarn.driver.resource.memory=2g<br> + * + * Please note that if multiple resources are defined like described above, + * the error messages will be concatenated.<br> + * Example of such a config:<br> + * - spark.yarn.driver.resource.memory=2g<br> + * - spark.yarn.executor.resource.cores=2<br> + * Then the following two error messages will be printed:<br> + * - "memory cannot be requested with config spark.yarn.driver.resource.memory, + * please use config spark.driver.memory instead!<br> + * - "cores cannot be requested with config spark.yarn.executor.resource.cores, + * please use config spark.executor.cores instead!<br> + * + * @param sparkConf + */ + def validateResources(sparkConf: SparkConf): Unit = { + val overallErrorMessage = new mutable.StringBuilder() + POSSIBLE_RESOURCE_DEFINITIONS.foreach { resdef => + val customResources: Map[String, String] = + getCustomResourceValuesForKey(sparkConf, resdef._1) + + val errorMessage = getErrorMessage(customResources, resdef._1, resdef._2) + if 
(errorMessage.nonEmpty) { + overallErrorMessage.append(s"$ERROR_PREFIX$errorMessage\n") + } + } + + // throw exception after loop + if (overallErrorMessage.nonEmpty) { + throw new SparkException(overallErrorMessage.toString()) + } + } + + def getCustomResourceValuesForKey(sparkConf: SparkConf, key: String): Map[String, String] = { + if (key contains "am") { + extractCustomResources(sparkConf, YARN_AM_RESOURCE_TYPES_PREFIX) + } else if (key contains "driver") { + extractCustomResources(sparkConf, YARN_DRIVER_RESOURCE_TYPES_PREFIX) + } else if (key contains "executor") { + extractCustomResources(sparkConf, YARN_EXECUTOR_RESOURCE_TYPES_PREFIX) + } else Map.empty[String, String] + } + + def getErrorMessage( + customResources: Map[String, String], + standardResourceConfigKey: String, + customResourceConfigKey: String): String = { + if (customResources.contains(customResourceConfigKey)) { + val idx = standardResourceConfigKey.lastIndexOf(".") + val resourceType = standardResourceConfigKey.substring(idx + 1) + s"$resourceType cannot be requested with config $customResourceConfigKey, " + + s"please use config $standardResourceConfigKey instead!" + } else { + "" + } + } + + private def extractCustomResources( + sparkConf: SparkConf, + propertyPrefix: String): Map[String, String] = { + val result: collection.mutable.HashMap[String, String] = --- End diff -- Do you need to duplicate the type here?
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org