Github user squito commented on a diff in the pull request:

    https://github.com/apache/spark/pull/5563#discussion_r33275843

    --- Diff: core/src/main/scala/org/apache/spark/scheduler/cluster/mesos/MesosSchedulerUtils.scala ---

    @@ -86,10 +89,106 @@ private[mesos] trait MesosSchedulerUtils extends Logging {
       /**
        * Get the amount of resources for the specified type from the resource list
        */
    -  protected def getResource(res: List[Resource], name: String): Double = {
    +  protected def getResource(res: JList[Resource], name: String): Double = {
         for (r <- res if r.getName == name) {
           return r.getScalar.getValue
         }
         0.0
       }
    +
    +
    +  /** Helper method to get the key, value-set pair for a Mesos Attribute protobuf */
    +  private[mesos] def getAttribute(attr: Attribute): (String, Set[String]) =
    +    (attr.getName, attr.getText.getValue.split(',').toSet)
    +
    +
    +  /** Build a Mesos resource protobuf object */
    +  private[mesos] def createResource(resourceName: String, quantity: Double): Protos.Resource = {
    +    Resource.newBuilder()
    +      .setName(resourceName)
    +      .setType(Value.Type.SCALAR)
    +      .setScalar(Value.Scalar.newBuilder().setValue(quantity).build())
    +      .build()
    +  }
    +
    +
    +  /**
    +   * Match the requirements (if any) to the offer attributes.
    +   * If attribute requirements are not specified, return true;
    +   * else if an attribute is defined with no values, a simple attribute presence check is performed;
    +   * else if an attribute name and values are specified, a subset match is performed on slave attributes.
    +   */
    +  private[mesos] def matchesAttributeRequirements(
    +      slaveOfferConstraints: Map[String, Set[String]],
    +      offerAttributes: Map[String, Set[String]]): Boolean =
    +    if (slaveOfferConstraints.isEmpty) {
    +      true
    +    } else {
    +      slaveOfferConstraints.forall {
    +        // offer has the required attribute and subsumes the required values for that attribute
    +        case (name, requiredValues) =>
    +          // The attributes and their values are case sensitive during comparison,
    +          // i.e. tachyon -> true != Tachyon -> true != tachyon -> True
    +          offerAttributes.contains(name) && requiredValues.subsetOf(offerAttributes(name))
    +      }
    +    }
    +
    +  /**
    +   * Parses the attribute constraints provided to Spark and builds a matching data structure:
    +   * Map[<attribute-name>, Set[values-to-match]]
    +   * The constraints are specified as ';' separated key-value pairs where keys and values
    +   * are separated by ':'. The ':' implies equality. For example:
    +   * {{{
    +   * parseConstraintString("tachyon:true;zone:us-east-1a,us-east-1b")
    +   * // would result in
    +   * Map(
    +   *   "tachyon" -> Set("true"),
    +   *   "zone" -> Set("us-east-1a", "us-east-1b")
    +   * )
    +   * }}}
    +   * @param constraintsVal constraints string consisting of ';' separated key-value pairs
    +   *                       (keys and values separated by ':')
    +   * @return Map of constraints to match resources offers.
    +   */
    +  private[mesos] def parseConstraintString(constraintsVal: String): Map[String, Set[String]] = {
    +    /*
    +      Based on mesos docs:
    +      attributes : attribute ( ";" attribute )*
    +      attribute : labelString ":" ( labelString | "," )+
    +      labelString : [a-zA-Z0-9_/.-]
    +    */
    +    val splitter = Splitter.on(';').trimResults().withKeyValueSeparator(':')
    +    // kv splitter
    +    if (constraintsVal.isEmpty) {
    +      Map()
    +    } else {
    +      try {
    +        Map() ++ mapAsScalaMap(splitter.split(constraintsVal)).map {
    +          case (k, v) =>
    +            if (v == null) {
    +              (k, Set[String]())
    --- End diff --

Actually, I don't think this case is possible -- you already have a test for keys without values, and if the value is empty, then `v == ""`.
Whatever the right semantics are, I'd add a test case for args like "tachyon:":

```scala
scala> splitter.split("tachyon:")
res4: java.util.Map[String,String] = {tachyon=}

scala> res4.get("tachyon")
res5: String = ""
```
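For what it's worth, a rough sketch of such a test might look like the following. The suite name, the `FunSuite with Matchers` base, and the anonymous `utils` fixture are my assumptions about how this could be wired up, not something already in the PR:

```scala
package org.apache.spark.scheduler.cluster.mesos

import org.scalatest.{FunSuite, Matchers}

// Sketch only: the suite name and the fixture below are hypothetical,
// not taken from the PR.
class MesosSchedulerUtilsSuite extends FunSuite with Matchers {

  // Mixing the trait into an anonymous instance makes the private[mesos]
  // helpers reachable from this same-package test.
  private val utils = new MesosSchedulerUtils {}

  test("a constraint with a trailing ':' maps the key to an empty value set") {
    // Guava's MapSplitter yields "" (not null) as the value for "tachyon:",
    // so a presence-only constraint should parse to an empty set of values.
    utils.parseConstraintString("tachyon:") shouldBe Map("tachyon" -> Set())
  }
}
```

If "tachyon:" is instead supposed to be rejected, the same test shape works with `an[IllegalArgumentException] should be thrownBy ...` around the call.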