Github user ifilonenko commented on a diff in the pull request:

    https://github.com/apache/spark/pull/21092#discussion_r186266469
  
    --- Diff: 
resource-managers/kubernetes/core/src/main/scala/org/apache/spark/deploy/k8s/KubernetesConf.scala
 ---
    @@ -101,17 +112,29 @@ private[spark] object KubernetesConf {
           appId: String,
           mainAppResource: Option[MainAppResource],
           mainClass: String,
    -      appArgs: Array[String]): 
KubernetesConf[KubernetesDriverSpecificConf] = {
    +      appArgs: Array[String],
    +      maybePyFiles: Option[String]): 
KubernetesConf[KubernetesDriverSpecificConf] = {
         val sparkConfWithMainAppJar = sparkConf.clone()
    +    val additionalFiles = mutable.ArrayBuffer.empty[String]
         mainAppResource.foreach {
    -      case JavaMainAppResource(res) =>
    -        val previousJars = sparkConf
    -          .getOption("spark.jars")
    -          .map(_.split(","))
    -          .getOrElse(Array.empty)
    -        if (!previousJars.contains(res)) {
    -          sparkConfWithMainAppJar.setJars(previousJars ++ Seq(res))
    -        }
    +        case JavaMainAppResource(res) =>
    +          val previousJars = sparkConf
    +            .getOption("spark.jars")
    +            .map(_.split(","))
    +            .getOrElse(Array.empty)
    +          if (!previousJars.contains(res)) {
    +            sparkConfWithMainAppJar.setJars(previousJars ++ Seq(res))
    +          }
    +        case nonJVM: NonJVMResource =>
    --- End diff --
    
    Because the R step should have the same amount of default MemoryOverhead. 
As should all NonJVMResources. 


---

---------------------------------------------------------------------
To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org
For additional commands, e-mail: reviews-help@spark.apache.org

Reply via email to