Github user vanzin commented on a diff in the pull request:

    https://github.com/apache/spark/pull/19041#discussion_r138754158
  
    --- Diff: 
core/src/main/scala/org/apache/spark/ExecutorAllocationManager.scala ---
    @@ -416,63 +424,52 @@ private[spark] class ExecutorAllocationManager(
        * Request the cluster manager to remove the given executors.
        * Returns the list of executors which are removed.
        */
    -  private def removeExecutors(executors: Seq[String]): Seq[String] = 
synchronized {
    -    val executorIdsToBeRemoved = new ArrayBuffer[String]
    -
    +  private def removeExecutors(executors: Seq[String]): Unit = synchronized 
{
         logInfo("Request to remove executorIds: " + executors.mkString(", "))
    -    val numExistingExecutors = allocationManager.executorIds.size - 
executorsPendingToRemove.size
    -
    -    var newExecutorTotal = numExistingExecutors
    -    executors.foreach { executorIdToBeRemoved =>
    -      if (newExecutorTotal - 1 < minNumExecutors) {
    -        logDebug(s"Not removing idle executor $executorIdToBeRemoved 
because there are only " +
    -          s"$newExecutorTotal executor(s) left (minimum number of executor 
limit $minNumExecutors)")
    -      } else if (newExecutorTotal - 1 < numExecutorsTarget) {
    -        logDebug(s"Not removing idle executor $executorIdToBeRemoved 
because there are only " +
    -          s"$newExecutorTotal executor(s) left (number of executor target 
$numExecutorsTarget)")
    -      } else if (canBeKilled(executorIdToBeRemoved)) {
    -        executorIdsToBeRemoved += executorIdToBeRemoved
    -        newExecutorTotal -= 1
    -      }
    -    }
    +    val numExistingExecs = allocationManager.executorIds.size - 
executorsPendingToRemove.size
    +    val execCountFloor = Math.max(minNumExecutors, numExecutorsTarget)
    --- End diff --
    
    nit: `math.max` is more Scala-y.


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to