This is an automated email from the ASF dual-hosted git repository.

dongjoon pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 24b4581fa818 [SPARK-47894][CORE][WEBUI] Add `Environment` page to 
Master UI
24b4581fa818 is described below

commit 24b4581fa818da89a5aff57437addcece707e678
Author: Dongjoon Hyun <dh...@apple.com>
AuthorDate: Wed Apr 17 20:29:54 2024 -0700

    [SPARK-47894][CORE][WEBUI] Add `Environment` page to Master UI
    
    ### What changes were proposed in this pull request?
    
    This PR aims to add `Environment` page to `Spark Master UI`.
    
    ### Why are the changes needed?
    
    To improve `Spark Standalone` cluster UX by providing `Spark Master` JVM's 
information
    - `Runtime Information`
    - `Spark Properties`
    - `Hadoop Properties`
    - `System Properties`
    - `Metrics Properties`
    - `Classpath Entries`
    
    <img width="371" alt="Screenshot 2024-04-17 at 17 08 09" src="https://github.com/apache/spark/assets/9700541/2b02abbd-e08f-4b0f-834a-160ea6fd00c7">
    
    <img width="612" alt="Screenshot 2024-04-17 at 17 09 02" src="https://github.com/apache/spark/assets/9700541/664d113a-b677-41a7-9e8c-841e087aae1d">
    
    ### Does this PR introduce _any_ user-facing change?
    
    Yes, but this is a new UI.
    
    ### How was this patch tested?
    
    Pass the CIs with the newly added test case.
    
    Or manually check the UI after running `Master`.
    
    ```
    $ SPARK_MASTER_OPTS="-Dspark.master.rest.enabled=true 
-Dspark.deploy.maxDrivers=2" sbin/start-master.sh
    ```
    
    ### Was this patch authored or co-authored using generative AI tooling?
    
    No.
    
    Closes #46111 from dongjoon-hyun/SPARK-47894.
    
    Authored-by: Dongjoon Hyun <dh...@apple.com>
    Signed-off-by: Dongjoon Hyun <dh...@apple.com>
---
 .../spark/deploy/master/ui/EnvironmentPage.scala   | 141 +++++++++++++++++++++
 .../apache/spark/deploy/master/ui/MasterPage.scala |   5 +-
 .../spark/deploy/master/ui/MasterWebUI.scala       |   5 +
 .../master/ui/ReadOnlyMasterWebUISuite.scala       |  14 +-
 4 files changed, 162 insertions(+), 3 deletions(-)

diff --git 
a/core/src/main/scala/org/apache/spark/deploy/master/ui/EnvironmentPage.scala 
b/core/src/main/scala/org/apache/spark/deploy/master/ui/EnvironmentPage.scala
new file mode 100644
index 000000000000..190e821524ba
--- /dev/null
+++ 
b/core/src/main/scala/org/apache/spark/deploy/master/ui/EnvironmentPage.scala
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.deploy.master.ui
+
+import scala.xml.Node
+
+import jakarta.servlet.http.HttpServletRequest
+
+import org.apache.spark.{SparkConf, SparkEnv}
+import org.apache.spark.deploy.SparkHadoopUtil
+import org.apache.spark.ui._
+import org.apache.spark.util.Utils
+
private[ui] class EnvironmentPage(
    parent: MasterWebUI,
    conf: SparkConf) extends WebUIPage("Environment") {

  /**
   * Render the Master's `Environment` page: the Master JVM's runtime information,
   * Spark/Hadoop/system/metrics properties and classpath entries, each shown in
   * its own collapsible table.
   */
  def render(request: HttpServletRequest): Seq[Node] = {
    // Collect this JVM's environment. The blank/empty trailing arguments mean no
    // scheduling mode, no added jars/files/archives and no resource profile info
    // is requested — this is the Master process, not an application.
    val details = SparkEnv.environmentDetails(conf, SparkHadoopUtil.get.newConfiguration(conf),
      "", Seq.empty, Seq.empty, Seq.empty, Map.empty)
    val jvmInformation = details("JVM Information").sorted
    // Redact values that may contain secrets (e.g. credentials) before display.
    val sparkProperties = Utils.redact(conf, details("Spark Properties")).sorted
    val hadoopProperties = Utils.redact(conf, details("Hadoop Properties")).sorted
    val systemProperties = Utils.redact(conf, details("System Properties")).sorted
    val metricsProperties = Utils.redact(conf, details("Metrics Properties")).sorted
    val classpathEntries = details("Classpath Entries").sorted

    // Only the first two sections are expanded by default; the rest start collapsed.
    val content =
      <div>
        <p><a href="/">Back to Master</a></p>
      </div>
      <span>
        {section("runtimeInformation", "Runtime Information", collapsed = false,
          propertyTable(jvmInformation))}
        {section("sparkProperties", "Spark Properties", collapsed = false,
          propertyTable(sparkProperties))}
        {section("hadoopProperties", "Hadoop Properties", collapsed = true,
          propertyTable(hadoopProperties))}
        {section("systemProperties", "System Properties", collapsed = true,
          propertyTable(systemProperties))}
        {section("metricsProperties", "Metrics Properties", collapsed = true,
          propertyTable(metricsProperties))}
        {section("classpathEntries", "Classpath Entries", collapsed = true,
          UIUtils.listingTable(classPathHeader, classPathRow, classpathEntries,
            fixedWidth = true, headerClasses = headerClasses))}
      </span>
    UIUtils.basicSparkPage(request, content, "Environment")
  }

  /** Two-column Name/Value table used for every property section. */
  private def propertyTable(properties: Seq[(String, String)]): Seq[Node] =
    UIUtils.listingTable(propertyHeader, propertyRow, properties,
      fixedWidth = true, headerClasses = headerClasses)

  /**
   * One collapsible section: a clickable header that toggles the table's visibility
   * via the page's `collapseTable` JavaScript helper.
   *
   * @param id        base name used to derive the CSS class names `collapseTable` expects
   * @param title     human-readable section title
   * @param collapsed whether the section starts collapsed
   * @param table     the table rendered inside the section
   */
  private def section(
      id: String,
      title: String,
      collapsed: Boolean,
      table: Seq[Node]): Seq[Node] = {
    val arrowClass = if (collapsed) "arrow-closed" else "arrow-open"
    val tableClass =
      s"aggregated-$id collapsible-table" + (if (collapsed) " collapsed" else "")
    <span class={s"collapse-aggregated-$id collapse-table"}
        onClick={s"collapseTable('collapse-aggregated-$id', 'aggregated-$id')"}>
      <h4>
        <span class={s"collapse-table-arrow $arrowClass"}></span>
        <a>{title}</a>
      </h4>
    </span>
    <div class={tableClass}>
      {table}
    </div>
  }

  private def propertyHeader = Seq("Name", "Value")
  private def classPathHeader = Seq("Resource", "Source")
  private def headerClasses = Seq("sorttable_alpha", "sorttable_alpha")

  // Removed dead code from the original: `headerClassesNoSortValues` and
  // `jvmRowDataPre` were private and never referenced anywhere in this class.
  private def propertyRow(kv: (String, String)) = <tr><td>{kv._1}</td><td>{kv._2}</td></tr>
  private def classPathRow(data: (String, String)) = <tr><td>{data._1}</td><td>{data._2}</td></tr>
}
+
diff --git 
a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala 
b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala
index d07f299d52ba..1248b1c368e7 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterPage.scala
@@ -169,8 +169,9 @@ private[ui] class MasterPage(parent: MasterWebUI) extends 
WebUIPage("") {
                 {state.completedDrivers.count(_.state == DriverState.ERROR)} 
Error,
                 {state.completedDrivers.count(_.state == 
DriverState.RELAUNCHING)} Relaunching)
               </li>
-              <li><strong>Status:</strong>
-                <a href={"/logPage/?self&logType=out"}>{state.status}</a>
+              <li><strong>Status:</strong> {state.status}
+                (<a href={"/environment"}>Environment</a>,
+                <a href={"/logPage/?self&logType=out"}>Log</a>)
               </li>
             </ul>
           </div>
diff --git 
a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala 
b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
index da3c91956689..d7e650a794dd 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
@@ -54,6 +54,11 @@ class MasterWebUI(
     val masterPage = new MasterPage(this)
     attachPage(new ApplicationPage(this))
     attachPage(new LogPage(this))
+    val envPage = new EnvironmentPage(this, master.conf)
+    attachPage(envPage)
+    this.attachHandler(createServletHandler("/environment",
+      (request: HttpServletRequest) => envPage.render(request),
+      master.conf))
     attachPage(masterPage)
     addStaticHandler(MasterWebUI.STATIC_RESOURCE_DIR)
     addRenderLogHandler(this, master.conf)
diff --git 
a/core/src/test/scala/org/apache/spark/deploy/master/ui/ReadOnlyMasterWebUISuite.scala
 
b/core/src/test/scala/org/apache/spark/deploy/master/ui/ReadOnlyMasterWebUISuite.scala
index d7f05754a7cd..20ff932eb01a 100644
--- 
a/core/src/test/scala/org/apache/spark/deploy/master/ui/ReadOnlyMasterWebUISuite.scala
+++ 
b/core/src/test/scala/org/apache/spark/deploy/master/ui/ReadOnlyMasterWebUISuite.scala
@@ -17,7 +17,9 @@
 
 package org.apache.spark.deploy.master.ui
 
-import jakarta.servlet.http.HttpServletResponse.SC_METHOD_NOT_ALLOWED
+import scala.io.Source
+
+import jakarta.servlet.http.HttpServletResponse.{SC_METHOD_NOT_ALLOWED, SC_OK}
 import org.mockito.Mockito.{mock, when}
 
 import org.apache.spark.{SecurityManager, SparkConf, SparkFunSuite}
@@ -74,4 +76,14 @@ class ReadOnlyMasterWebUISuite extends SparkFunSuite {
     val body = convPostDataToString(hostnames.map(("host", _)))
     assert(sendHttpRequest(url, "POST", body).getResponseCode === 
SC_METHOD_NOT_ALLOWED)
   }
+
+  test("SPARK-47894: /environment") {
+    val url = 
s"http://${Utils.localHostNameForURI()}:${masterWebUI.boundPort}/environment"
+    val conn = sendHttpRequest(url, "GET", "")
+    assert(conn.getResponseCode === SC_OK)
+    val result = Source.fromInputStream(conn.getInputStream).mkString
+    assert(result.contains("Runtime Information"))
+    assert(result.contains("Spark Properties"))
+    assert(result.contains("Hadoop Properties"))
+  }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to