Github user vanzin commented on a diff in the pull request: https://github.com/apache/spark/pull/19041#discussion_r143815711 --- Diff: core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala --- @@ -416,63 +423,52 @@ private[spark] class ExecutorAllocationManager( * Request the cluster manager to remove the given executors. * Returns the list of executors which are removed. */ - private def removeExecutors(executors: Seq[String]): Seq[String] = synchronized { - val executorIdsToBeRemoved = new ArrayBuffer[String] - + private def removeExecutors(executors: Seq[String]): Unit = synchronized { logInfo("Request to remove executorIds: " + executors.mkString(", ")) - val numExistingExecutors = allocationManager.executorIds.size - executorsPendingToRemove.size - - var newExecutorTotal = numExistingExecutors - executors.foreach { executorIdToBeRemoved => - if (newExecutorTotal - 1 < minNumExecutors) { - logDebug(s"Not removing idle executor $executorIdToBeRemoved because there are only " + - s"$newExecutorTotal executor(s) left (minimum number of executor limit $minNumExecutors)") - } else if (newExecutorTotal - 1 < numExecutorsTarget) { - logDebug(s"Not removing idle executor $executorIdToBeRemoved because there are only " + - s"$newExecutorTotal executor(s) left (number of executor target $numExecutorsTarget)") - } else if (canBeKilled(executorIdToBeRemoved)) { - executorIdsToBeRemoved += executorIdToBeRemoved - newExecutorTotal -= 1 - } - } + val numExistingExecs = allocationManager.executorIds.size - executorsPendingToRemove.size + val execCountFloor = math.max(minNumExecutors, numExecutorsTarget) + val (executorIdsToBeRemoved, dontRemove) = executors + .filter(canBeKilled) + .splitAt(numExistingExecs - execCountFloor) - if (executorIdsToBeRemoved.isEmpty) { - return Seq.empty[String] + dontRemove.foreach { execId => --- End diff -- minor: add `log.isDebugEnabled` check.
--- --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org