Github user mgummelt commented on a diff in the pull request:

    https://github.com/apache/spark/pull/11157#discussion_r74156824
  
    --- Diff: core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala ---
    @@ -358,6 +376,119 @@ private[mesos] trait MesosSchedulerUtils extends Logging {
       }
     
       /**
    +   * Checks whether the executor ports fall within the offered ranges of ports.
    +   *
    +   * @param conf the Spark Config
    +   * @param ports the list of ports to check
    +   * @return true if all the ports are within range, false otherwise
    +   */
    +  protected def checkPorts(conf: SparkConf, ports: List[(Long, Long)]): Boolean = {
    +
    +    def checkIfInRange(port: Long, ps: List[(Long, Long)]): Boolean = {
    +      ps.exists(r => r._1 <= port && r._2 >= port)
    +    }
    +
    +    val portsToCheck = nonZeroPortValuesFromConfig(conf)
    +    val withinRange = portsToCheck.forall(p => checkIfInRange(p, ports))
    +    // make sure we have enough ports to allocate per offer
    +    ports.map(r => r._2 - r._1 + 1).sum >= portsToCheck.size && withinRange
    +  }
    +
    +  /**
    +   * Partitions port resources.
    +   *
    +   * @param requestedPorts non-zero ports to assign
    +   * @param offeredResources the resources offered
    +   * @return the resources left over and the port resources to be used
    +   */
    +  def partitionPortResources(requestedPorts: List[Long], offeredResources: List[Resource])
    +    : (List[Resource], List[Resource]) = {
    +    if (requestedPorts.isEmpty) {
    --- End diff --
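
    As a side note for readers of the quoted hunk: the check above boils down to "every requested port sits in some offered range, and the offers hold at least as many ports as requested". A minimal standalone sketch of that logic, with plain tuples in place of SparkConf and Mesos Resource objects and made-up port values:

        object PortCheckSketch {
          // True if `port` falls inside any of the offered (begin, end) ranges, bounds inclusive.
          def inRange(port: Long, ranges: List[(Long, Long)]): Boolean =
            ranges.exists { case (begin, end) => begin <= port && port <= end }

          def main(args: Array[String]): Unit = {
            val offered = List((31000L, 31005L), (32000L, 32010L)) // hypothetical offered ranges
            val requested = List(31003L, 32001L)                   // hypothetical non-zero config ports

            val allInRange = requested.forall(inRange(_, offered))
            val capacity = offered.map { case (begin, end) => end - begin + 1 }.sum
            // Mirrors the final expression of checkPorts: all ports in range AND enough offered ports.
            println(allInRange && capacity >= requested.size)      // prints: true
          }
        }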
    
    Saving a few cycles is not worth the added complexity of special-casing. Please remove this.
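
    To illustrate the point, here is a hypothetical, guard-free sketch (not the PR's actual partitionPortResources: it uses plain tuples instead of Mesos Resource objects and consumes matching ranges whole rather than splitting them). With an empty requested list, the general path already returns every offer untouched and no used ports, so an isEmpty branch only duplicates that behaviour.

        object PartitionSketch {
          def partitionPorts(
              requested: List[Long],
              offered: List[(Long, Long)]): (List[(Long, Long)], List[Long]) = {
            // Requested ports that some offered range actually covers.
            val used = requested.filter(p => offered.exists { case (b, e) => b <= p && p <= e })
            // Offered ranges that no used port touches are handed back unchanged.
            val leftOver = offered.filterNot { case (b, e) => used.exists(p => b <= p && p <= e) }
            (leftOver, used)
          }

          def main(args: Array[String]): Unit = {
            // Empty request: the general code path yields (all offers, no used ports) on its own.
            println(partitionPorts(Nil, List((31000L, 32000L))))
            // prints: (List((31000,32000)),List())
          }
        }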

