tgravescs commented on a change in pull request #24615: [SPARK-27488][CORE] 
Driver interface to support GPU resources
URL: https://github.com/apache/spark/pull/24615#discussion_r286184130
 
 

 ##########
 File path: 
core/src/main/scala/org/apache/spark/executor/CoarseGrainedExecutorBackend.scala
 ##########
 @@ -83,51 +83,11 @@ private[spark] class CoarseGrainedExecutorBackend(
     }(ThreadUtils.sameThread)
   }
 
-  // Check that the actual resources discovered will satisfy the user specified
-  // requirements and that they match the configs specified by the user to 
catch
-  // mismatches between what the user requested and what resource manager gave 
or
-  // what the discovery script found.
-  private def checkResourcesMeetRequirements(
-      resourceConfigPrefix: String,
-      reqResourcesAndCounts: Array[(String, String)],
-      actualResources: Map[String, ResourceInformation]): Unit = {
-
-    reqResourcesAndCounts.foreach { case (rName, reqCount) =>
-      if (actualResources.contains(rName)) {
-        val resourceInfo = actualResources(rName)
-
-        if (resourceInfo.addresses.size < reqCount.toLong) {
-          throw new SparkException(s"Resource: $rName with addresses: " +
-            s"${resourceInfo.addresses.mkString(",")} doesn't meet the " +
-            s"requirements of needing $reqCount of them")
-        }
-        // also make sure the resource count on start matches the
-        // resource configs specified by user
-        val userCountConfigName =
-          resourceConfigPrefix + rName + SPARK_RESOURCE_COUNT_POSTFIX
-        val userConfigCount = env.conf.getOption(userCountConfigName).
-          getOrElse(throw new SparkException(s"Resource: $rName not specified 
" +
-            s"via config: $userCountConfigName, but required, " +
-            "please fix your configuration"))
-
-        if (userConfigCount.toLong > resourceInfo.addresses.size) {
-          throw new SparkException(s"Resource: $rName, with addresses: " +
-            s"${resourceInfo.addresses.mkString(",")} " +
-            s"is less than what the user requested for count: 
$userConfigCount, " +
-            s"via $userCountConfigName")
-        }
-      } else {
-        throw new SparkException(s"Executor resource config missing required 
task resource: $rName")
-      }
-    }
-  }
-
   // visible for testing
   def parseOrFindResources(resourcesFile: Option[String]): Map[String, 
ResourceInformation] = {
     // only parse the resources if a task requires them
-    val taskResourceConfigs = 
env.conf.getAllWithPrefix(SPARK_TASK_RESOURCE_PREFIX)
-    val resourceInfo = if (taskResourceConfigs.nonEmpty) {
-      val execResources = resourcesFile.map { resourceFileStr => {
+    val resourceInfo = if 
(env.conf.getAllWithPrefix(SPARK_TASK_RESOURCE_PREFIX).nonEmpty) {
 
 Review comment:
   No, I only want to look for resources if a task we are running requires 
them.  If no tasks require other resources, then it's just overhead for us.

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
us...@infra.apache.org


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscr...@spark.apache.org
For additional commands, e-mail: reviews-h...@spark.apache.org

Reply via email to