Ngone51 commented on code in PR #36716:
URL: https://github.com/apache/spark/pull/36716#discussion_r903253826


##########
core/src/main/scala/org/apache/spark/resource/ResourceProfileManager.scala:
##########
@@ -63,17 +64,28 @@ private[spark] class ResourceProfileManager(sparkConf: SparkConf,
    */
   private[spark] def isSupported(rp: ResourceProfile): Boolean = {
     val isNotDefaultProfile = rp.id != ResourceProfile.DEFAULT_RESOURCE_PROFILE_ID
-    val notYarnOrK8sAndNotDefaultProfile = isNotDefaultProfile && !(isYarn || isK8s)
-    val YarnOrK8sNotDynAllocAndNotDefaultProfile =
-      isNotDefaultProfile && (isYarn || isK8s) && !dynamicEnabled
+    val notYarnOrK8sOrStandaloneAndNotDefaultProfile =
+      isNotDefaultProfile && !(isYarn || isK8s || isStandalone)
+    val YarnOrK8sOrStandaloneNotDynAllocAndNotDefaultProfile =
+      isNotDefaultProfile && (isYarn || isK8s || isStandalone) && !dynamicEnabled
     // We want the exception to be thrown only when we are specifically testing for the
     // exception or in a real application. Otherwise in all other testing scenarios we want
     // to skip throwing the exception so that we can test in other modes to make testing easier.
     if ((notRunningUnitTests || testExceptionThrown) &&
-        (notYarnOrK8sAndNotDefaultProfile || YarnOrK8sNotDynAllocAndNotDefaultProfile)) {
+        (notYarnOrK8sOrStandaloneAndNotDefaultProfile ||
+          YarnOrK8sOrStandaloneNotDynAllocAndNotDefaultProfile)) {
       throw new SparkException("ResourceProfiles are only supported on YARN and Kubernetes " +
-        "with dynamic allocation enabled.")
+        "and Standalone with dynamic allocation enabled.")
     }
+
+    if (isStandalone && rp.getExecutorCores.isEmpty &&
+      sparkConf.getOption(config.EXECUTOR_CORES.key).isEmpty) {
+      logWarning(s"Executor cores is not set for resource profile: ${rp.id}, and " +
+        s"spark.executor.cores is also not specified, you may get more executors allocated than " +
+        s"expected. It's recommended to set executor cores explicitly. Check this issue " +

Review Comment:
   nit: "Neither executor cores is set for resource profile, nor 
spark.executor.cores is explicitly set, you may ..."
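
For context, a minimal user-side sketch of the case this warning targets: a ResourceProfile built without executor cores, submitted to a standalone master with dynamic allocation enabled and spark.executor.cores left unset. The master URL, app name, and configuration values below are placeholders, not taken from the PR.

```scala
// Sketch only: illustrates when the new standalone warning would be logged.
// Assumes a standalone master (placeholder URL) and dynamic allocation enabled.
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.resource.{ExecutorResourceRequests, ResourceProfileBuilder, TaskResourceRequests}

object StandaloneResourceProfileSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("rp-standalone-sketch")
      .setMaster("spark://master:7077")             // hypothetical standalone master
      .set("spark.dynamicAllocation.enabled", "true")
      .set("spark.shuffle.service.enabled", "true") // typically needed with dynamic allocation
      // spark.executor.cores deliberately left unset
    val sc = new SparkContext(conf)

    // The executor request sets memory only, so rp.getExecutorCores is empty and
    // the logWarning added in this diff would fire when isSupported checks the profile.
    val execReqs = new ExecutorResourceRequests().memory("2g")
    val taskReqs = new TaskResourceRequests().cpus(1)
    val rp = new ResourceProfileBuilder().require(execReqs).require(taskReqs).build

    // Attach the profile to an RDD and run a trivial action.
    val rdd = sc.parallelize(1 to 100, 4).withResources(rp)
    println(rdd.map(_ * 2).count())
    sc.stop()
  }
}
```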



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
