Github user srowen commented on a diff in the pull request: https://github.com/apache/spark/pull/20761#discussion_r206644780 --- Diff: resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/ResourceTypeValidator.scala --- @@ -0,0 +1,193 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.spark.deploy.yarn + +import scala.collection.mutable + +import org.apache.spark.{SparkConf, SparkException} +import org.apache.spark.deploy.yarn.ProcessType.{AM, DRIVER, EXECUTOR, ProcessType} +import org.apache.spark.deploy.yarn.ResourceType.{CORES, MEMORY, ResourceType} +import org.apache.spark.deploy.yarn.RunMode.{CLIENT, CLUSTER, RunMode} +import org.apache.spark.deploy.yarn.config._ +import org.apache.spark.internal.config._ + +private object ProcessType extends Enumeration { + type ProcessType = Value + val DRIVER = Value("driver") + val EXECUTOR = Value("executor") + val AM = Value("am") +} + +private object RunMode extends Enumeration { + type RunMode = Value + val CLIENT = Value("client") + val CLUSTER = Value("cluster") +} + +private object ResourceType extends Enumeration { + type ResourceType = Value + val CORES = Value("cores") + val MEMORY = Value("memory") +} + +private object ResourceTypeValidator { + private val ERROR_PREFIX: String = "Error: " + private val POSSIBLE_RESOURCE_DEFINITIONS = Seq[ResourceConfigProperties]( + new ResourceConfigProperties(AM, Some(CLIENT), MEMORY), + new ResourceConfigProperties(AM, Some(CLIENT), CORES), + new ResourceConfigProperties(DRIVER, Some(CLUSTER), MEMORY), + new ResourceConfigProperties(DRIVER, Some(CLUSTER), CORES), + new ResourceConfigProperties(EXECUTOR, None, MEMORY), + new ResourceConfigProperties(EXECUTOR, None, CORES)) + + /** + * Validates sparkConf and throws a SparkException if a standard resource (memory or cores) + * is defined with the property spark.yarn.x.resource.y<br> + * + * Example of an invalid config:<br> + * - spark.yarn.driver.resource.memory=2g<br> + * + * Please note that if multiple resources are defined like described above, + * the error messages will be concatenated.<br> + * Example of such a config:<br> + * - spark.yarn.driver.resource.memory=2g<br> + * - spark.yarn.executor.resource.cores=2<br> + * Then the following two error messages will be 
printed:<br> + * - "memory cannot be requested with config spark.yarn.driver.resource.memory, + * please use config spark.driver.memory instead!"<br> + * - "cores cannot be requested with config spark.yarn.executor.resource.cores, + * please use config spark.executor.cores instead!"<br> + * + * @param sparkConf + */ + def validateResources(sparkConf: SparkConf): Unit = { + val requestedResources = new RequestedResources(sparkConf) + val sb = new mutable.StringBuilder() + POSSIBLE_RESOURCE_DEFINITIONS.foreach { rcp => + val customResources: Map[String, String] = getCustomResourceValue(requestedResources, rcp) + val (standardResourceConfigKey: String, customResourceConfigKey: String) = + getResourceConfigKeys(rcp) + + val errorMessage = + if (customResources.contains(customResourceConfigKey)) { + s"${rcp.resourceType} cannot be requested with config $customResourceConfigKey, " + + s"please use config $standardResourceConfigKey instead!" + } else { + "" + } + if (errorMessage.nonEmpty) { + printErrorMessageToBuffer(sb, errorMessage) + } + } + + if (sb.nonEmpty) { + throw new SparkException(sb.toString()) + } + } + + /** + * Returns the requested map of custom resources, + * based on the ResourceConfigProperties argument. + * @return --- End diff -- Just make the text above the value of the `@return` tag.
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org