This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new b14c1f036f8f [SPARK-45763][CORE][UI] Improve `MasterPage` to show `Resource` column only when it exists
b14c1f036f8f is described below

commit b14c1f036f8f394ad1903998128c05d04dd584a9
Author: Dongjoon Hyun <dh...@apple.com>
AuthorDate: Wed Nov 1 13:31:12 2023 -0700

    [SPARK-45763][CORE][UI] Improve `MasterPage` to show `Resource` column only when it exists
    
    ### What changes were proposed in this pull request?
    
    This PR aims to improve `MasterPage` to show the `Resource` column only when it exists.
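
    As a minimal, self-contained sketch of the pattern (not the actual Spark code; `Worker` and `renderTable` below are hypothetical stand-ins for `WorkerInfo` and `UIUtils.listingTable`), both the header list and the row renderer become conditional on whether any worker reports resources:

    ```scala
    // Hypothetical sketch of the conditional-column idea; not the real Spark classes.
    object ConditionalColumnSketch {
      case class Worker(id: String, cores: Int, resources: Map[String, Int])

      // Render a plain-text table from a header row plus one row per worker.
      def renderTable(headers: Seq[String], row: Worker => Seq[String], workers: Seq[Worker]): String =
        (headers +: workers.map(row)).map(_.mkString("| ", " | ", " |")).mkString("\n")

      // Curried row renderer: the extra cell is emitted only when the column exists.
      def workerRow(showResourceColumn: Boolean): Worker => Seq[String] = worker => {
        val base = Seq(worker.id, worker.cores.toString)
        if (showResourceColumn) base :+ worker.resources.mkString(", ") else base
      }

      def main(args: Array[String]): Unit = {
        val workers = Seq(Worker("w-1", 8, Map.empty), Worker("w-2", 8, Map("gpu" -> 2)))
        // Show the column only if at least one worker actually reports resources.
        val showResourceColumn = workers.exists(_.resources.nonEmpty)
        val headers =
          if (showResourceColumn) Seq("Worker Id", "Cores", "Resources")
          else Seq("Worker Id", "Cores")
        println(renderTable(headers, workerRow(showResourceColumn), workers))
      }
    }
    ```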
    
    ### Why are the changes needed?
    
    For non-GPU clusters, the `Resource` column is always empty.
    
    ### Does this PR introduce _any_ user-facing change?
    
    After this PR, `MasterPage` still shows the `Resource` column when a resource exists, like the following.
    
    ![Screenshot 2023-11-01 at 11 02 43 AM](https://github.com/apache/spark/assets/9700541/104dd4e7-938b-4269-8952-512e8fb5fa39)
    
    If there is no resource on any worker, the `Resource` column is omitted.
    
    ![Screenshot 2023-11-01 at 11 03 20 AM](https://github.com/apache/spark/assets/9700541/12c9d4b2-330a-4e36-a6eb-ac2813e0649a)
    
    ### How was this patch tested?
    
    Manual test.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #43628 from dongjoon-hyun/SPARK-45763.
    
    Authored-by: Dongjoon Hyun <dh...@apple.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../org/apache/spark/deploy/master/ui/MasterPage.scala    | 15 +++++++++++----
 1 file changed, 11 insertions(+), 4 deletions(-)

diff --git a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala
index 48c0c9601c14..cb325b37958e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala
@@ -98,10 +98,15 @@ private[ui] class MasterPage(parent: MasterWebUI) extends WebUIPage("") {
   def render(request: HttpServletRequest): Seq[Node] = {
     val state = getMasterState
 
-    val workerHeaders = Seq("Worker Id", "Address", "State", "Cores", "Memory", "Resources")
+    val showResourceColumn = state.workers.filter(_.resourcesInfoUsed.nonEmpty).nonEmpty
+    val workerHeaders = if (showResourceColumn) {
+      Seq("Worker Id", "Address", "State", "Cores", "Memory", "Resources")
+    } else {
+      Seq("Worker Id", "Address", "State", "Cores", "Memory")
+    }
     val workers = state.workers.sortBy(_.id)
     val aliveWorkers = state.workers.filter(_.state == WorkerState.ALIVE)
-    val workerTable = UIUtils.listingTable(workerHeaders, workerRow, workers)
+    val workerTable = UIUtils.listingTable(workerHeaders, workerRow(showResourceColumn), workers)
 
     val appHeaders = Seq("Application ID", "Name", "Cores", "Memory per Executor",
       "Resources Per Executor", "Submitted Time", "User", "State", "Duration")
@@ -256,7 +261,7 @@ private[ui] class MasterPage(parent: MasterWebUI) extends WebUIPage("") {
     UIUtils.basicSparkPage(request, content, "Spark Master at " + state.uri)
   }
 
-  private def workerRow(worker: WorkerInfo): Seq[Node] = {
+  private def workerRow(showResourceColumn: Boolean): WorkerInfo => Seq[Node] = worker => {
     <tr>
       <td>
         {
@@ -276,7 +281,9 @@ private[ui] class MasterPage(parent: MasterWebUI) extends WebUIPage("") {
         {Utils.megabytesToString(worker.memory)}
         ({Utils.megabytesToString(worker.memoryUsed)} Used)
       </td>
-      <td>{formatWorkerResourcesDetails(worker)}</td>
+      {if (showResourceColumn) {
+        <td>{formatWorkerResourcesDetails(worker)}</td>
+      }}
     </tr>
   }
 

