Github user vanzin commented on a diff in the pull request: https://github.com/apache/spark/pull/20761#discussion_r220023729 --- Diff: resource-managers/yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnAllocator.scala --- @@ -140,10 +140,19 @@ private[yarn] class YarnAllocator( } // Number of cores per executor. protected val executorCores = sparkConf.get(EXECUTOR_CORES) - // Resource capability requested for each executors - private[yarn] val resource = Resource.newInstance( - executorMemory + memoryOverhead + pysparkWorkerMemory, - executorCores) + + private val executorResourceTypes = --- End diff -- These aren't really types, they're requests.
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org