This is an automated email from the ASF dual-hosted git repository.

srowen pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 1ebf3e50024 [SPARK-39110][WEBUI] Add metrics properties to environment tab
1ebf3e50024 is described below

commit 1ebf3e500243003b584d21ff024fba2c11ac7dac
Author: Angerszhuuuu <angers....@gmail.com>
AuthorDate: Sun May 8 08:13:03 2022 -0500

    [SPARK-39110][WEBUI] Add metrics properties to environment tab
    
    ### What changes were proposed in this pull request?
    There are four ways to define metrics properties (a sketch of the last one follows this list):
    
    1. Default metrics properties
    2. A metrics.properties file under $SPARK_CONF_DIR
    3. spark.metrics.conf pointing to a metrics properties file
    4. spark.metrics.conf.xx.xx entries set directly on the Spark configuration
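    
    As a rough illustration of option 4 (a hedged sketch, not code from this patch; the app name is made up), the servlet sink that appears in the test expectations later in this diff can be configured inline through `spark.metrics.conf.*` keys:
    
    ```scala
    import org.apache.spark.SparkConf
    
    // Sketch only: the sink class and servlet paths are the ones visible
    // in app_environment_expectation.json below; the app name is hypothetical.
    val conf = new SparkConf()
      .setAppName("metrics-conf-demo")
      // equivalent to "*.sink.servlet.class=..." in a metrics.properties file
      .set("spark.metrics.conf.*.sink.servlet.class",
        "org.apache.spark.metrics.sink.MetricsServlet")
      .set("spark.metrics.conf.*.sink.servlet.path", "/metrics/json")
    ```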
    
    Many new users get confused when testing the metrics system. Adding the final, merged metrics properties to the Environment tab lets users see directly which metrics settings are in effect.
    
    <img width="1757" alt="截屏2022-05-06 上午11 23 04" 
src="https://user-images.githubusercontent.com/46485123/167062876-c0c98a69-13c7-4a25-bb31-74f1ada88153.png";>
    
    <img width="1786" alt="截屏2022-05-06 上午11 33 00" 
src="https://user-images.githubusercontent.com/46485123/167062893-f297eeda-b08f-4c9d-a2a2-a74add97493f.png";>
    
    ### Why are the changes needed?
    Make it clear to users which metrics properties are in effect.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes: users can see the effective metrics properties in a new section of the UI Environment tab, and in the environment REST API (see the sketch below).
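    
    A rough usage sketch, assuming a driver UI on the default port 4040 and a known application id (neither is part of this patch): the new field also appears in the environment REST endpoint served by `OneApplicationResource`:
    
    ```scala
    import scala.io.Source
    
    val appId = "app-20161116163331-0000" // example id from the test event log below
    val src = Source.fromURL(
      s"http://localhost:4040/api/v1/applications/$appId/environment")
    val json = try src.mkString finally src.close()
    // With this patch, the payload includes a "metricsProperties" array
    // alongside sparkProperties, hadoopProperties, and systemProperties.
    println(json.contains("metricsProperties"))
    ```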
    
    ### How was this patch tested?
    Manual test (MT).
    
    Closes #36462 from AngersZhuuuu/SPARK-39110.
    
    Authored-by: Angerszhuuuu <angers....@gmail.com>
    Signed-off-by: Sean Owen <sro...@gmail.com>
---
 .../src/main/resources/org/apache/spark/ui/static/webui.js |  1 +
 core/src/main/scala/org/apache/spark/SparkContext.scala    |  3 ++-
 core/src/main/scala/org/apache/spark/SparkEnv.scala        |  9 +++++----
 .../scala/org/apache/spark/metrics/MetricsSystem.scala     |  2 ++
 .../scala/org/apache/spark/status/AppStatusListener.scala  |  1 +
 .../spark/status/api/v1/OneApplicationResource.scala       |  1 +
 .../main/scala/org/apache/spark/status/api/v1/api.scala    |  1 +
 .../scala/org/apache/spark/ui/env/EnvironmentPage.scala    | 14 ++++++++++++++
 .../main/scala/org/apache/spark/util/JsonProtocol.scala    |  5 +++++
 .../app_environment_expectation.json                       |  6 ++++++
 .../multiple_resource_profiles_expectation.json            |  1 +
 .../test/resources/spark-events/app-20161116163331-0000    |  2 +-
 .../spark/deploy/history/FsHistoryProviderSuite.scala      |  3 +++
 .../apache/spark/scheduler/EventLoggingListenerSuite.scala |  2 +-
 .../scala/org/apache/spark/util/JsonProtocolSuite.scala    |  5 +++++
 project/MimaExcludes.scala                                 |  5 ++++-
 16 files changed, 53 insertions(+), 8 deletions(-)

diff --git a/core/src/main/resources/org/apache/spark/ui/static/webui.js b/core/src/main/resources/org/apache/spark/ui/static/webui.js
index c149f2d8433..b365082c1e1 100644
--- a/core/src/main/resources/org/apache/spark/ui/static/webui.js
+++ b/core/src/main/resources/org/apache/spark/ui/static/webui.js
@@ -73,6 +73,7 @@ $(function() {
   collapseTablePageLoad('collapse-aggregated-sparkProperties','aggregated-sparkProperties');
   collapseTablePageLoad('collapse-aggregated-hadoopProperties','aggregated-hadoopProperties');
   collapseTablePageLoad('collapse-aggregated-systemProperties','aggregated-systemProperties');
+  collapseTablePageLoad('collapse-aggregated-metricsProperties','aggregated-metricsProperties');
   collapseTablePageLoad('collapse-aggregated-classpathEntries','aggregated-classpathEntries');
   collapseTablePageLoad('collapse-aggregated-activeJobs','aggregated-activeJobs');
   collapseTablePageLoad('collapse-aggregated-completedJobs','aggregated-completedJobs');
diff --git a/core/src/main/scala/org/apache/spark/SparkContext.scala b/core/src/main/scala/org/apache/spark/SparkContext.scala
index c6cb5cb5e19..a106d5bacc5 100644
--- a/core/src/main/scala/org/apache/spark/SparkContext.scala
+++ b/core/src/main/scala/org/apache/spark/SparkContext.scala
@@ -2590,7 +2590,8 @@ class SparkContext(config: SparkConf) extends Logging {
       val addedFilePaths = addedFiles.keys.toSeq
       val addedArchivePaths = addedArchives.keys.toSeq
      val environmentDetails = SparkEnv.environmentDetails(conf, hadoopConfiguration,
-        schedulingMode, addedJarPaths, addedFilePaths, addedArchivePaths)
+        schedulingMode, addedJarPaths, addedFilePaths, addedArchivePaths,
+        env.metricsSystem.metricsProperties.asScala.toMap)
      val environmentUpdate = SparkListenerEnvironmentUpdate(environmentDetails)
       listenerBus.post(environmentUpdate)
     }
diff --git a/core/src/main/scala/org/apache/spark/SparkEnv.scala b/core/src/main/scala/org/apache/spark/SparkEnv.scala
index 19467e7eca1..66ee959dbd8 100644
--- a/core/src/main/scala/org/apache/spark/SparkEnv.scala
+++ b/core/src/main/scala/org/apache/spark/SparkEnv.scala
@@ -429,14 +429,14 @@ object SparkEnv extends Logging {
   * class paths. Map keys define the category, and map values represent the corresponding
   * attributes as a sequence of KV pairs. This is used mainly for SparkListenerEnvironmentUpdate.
    */
-  private[spark]
-  def environmentDetails(
+  private[spark] def environmentDetails(
       conf: SparkConf,
       hadoopConf: Configuration,
       schedulingMode: String,
       addedJars: Seq[String],
       addedFiles: Seq[String],
-      addedArchives: Seq[String]): Map[String, Seq[(String, String)]] = {
+      addedArchives: Seq[String],
+      metricsProperties: Map[String, String]): Map[String, Seq[(String, String)]] = {
 
     import Properties._
     val jvmInformation = Seq(
@@ -478,6 +478,7 @@ object SparkEnv extends Logging {
       "Spark Properties" -> sparkProperties,
       "Hadoop Properties" -> hadoopProperties,
       "System Properties" -> otherProperties,
-      "Classpath Entries" -> classPaths)
+      "Classpath Entries" -> classPaths,
+      "Metrics Properties" -> metricsProperties.toSeq.sorted)
   }
 }
diff --git a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
index a5903deeb78..caf3c344d64 100644
--- a/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
+++ b/core/src/main/scala/org/apache/spark/metrics/MetricsSystem.scala
@@ -233,6 +233,8 @@ private[spark] class MetricsSystem private (
       }
     }
   }
+
+  def metricsProperties(): Properties = metricsConfig.properties
 }
 
 private[spark] object MetricsSystem {
diff --git a/core/src/main/scala/org/apache/spark/status/AppStatusListener.scala b/core/src/main/scala/org/apache/spark/status/AppStatusListener.scala
index add9862c306..ea028dfd11d 100644
--- a/core/src/main/scala/org/apache/spark/status/AppStatusListener.scala
+++ b/core/src/main/scala/org/apache/spark/status/AppStatusListener.scala
@@ -179,6 +179,7 @@ private[spark] class AppStatusListener(
       details.getOrElse("Spark Properties", Nil),
       details.getOrElse("Hadoop Properties", Nil),
       details.getOrElse("System Properties", Nil),
+      details.getOrElse("Metrics Properties", Nil),
       details.getOrElse("Classpath Entries", Nil),
       Nil)
 
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/OneApplicationResource.scala b/core/src/main/scala/org/apache/spark/status/api/v1/OneApplicationResource.scala
index ef17168ebce..a52539c542e 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/OneApplicationResource.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/OneApplicationResource.scala
@@ -111,6 +111,7 @@ private[v1] class AbstractApplicationResource extends BaseAppResource {
       Utils.redact(ui.conf, envInfo.sparkProperties).sortBy(_._1),
       Utils.redact(ui.conf, envInfo.hadoopProperties).sortBy(_._1),
       Utils.redact(ui.conf, envInfo.systemProperties).sortBy(_._1),
+      Utils.redact(ui.conf, envInfo.metricsProperties).sortBy(_._1),
       envInfo.classpathEntries.sortBy(_._1),
       resourceProfileInfo)
   }
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/api.scala b/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
index d1a4d602fc5..ba21d5d1f62 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
@@ -459,6 +459,7 @@ class ApplicationEnvironmentInfo private[spark] (
     val sparkProperties: Seq[(String, String)],
     val hadoopProperties: Seq[(String, String)],
     val systemProperties: Seq[(String, String)],
+    val metricsProperties: Seq[(String, String)],
     val classpathEntries: Seq[(String, String)],
     val resourceProfiles: Seq[ResourceProfileInfo])
 
diff --git a/core/src/main/scala/org/apache/spark/ui/env/EnvironmentPage.scala b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentPage.scala
index 2f5b7311892..c6e224732cb 100644
--- a/core/src/main/scala/org/apache/spark/ui/env/EnvironmentPage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/env/EnvironmentPage.scala
@@ -83,6 +83,9 @@ private[ui] class EnvironmentPage(
    val systemPropertiesTable = UIUtils.listingTable(propertyHeader, propertyRow,
      Utils.redact(conf, appEnv.systemProperties.sorted), fixedWidth = true,
      headerClasses = headerClasses)
+    val metricsPropertiesTable = UIUtils.listingTable(propertyHeader, propertyRow,
+      Utils.redact(conf, appEnv.metricsProperties.sorted), fixedWidth = true,
+      headerClasses = headerClasses)
    val classpathEntriesTable = UIUtils.listingTable(
      classPathHeader, classPathRow, appEnv.classpathEntries.sorted, fixedWidth = true,
       headerClasses = headerClasses)
@@ -143,6 +146,17 @@ private[ui] class EnvironmentPage(
         <div class="aggregated-systemProperties collapsible-table collapsed">
           {systemPropertiesTable}
         </div>
+        <span class="collapse-aggregated-metricsProperties collapse-table"
+              onClick="collapseTable('collapse-aggregated-metricsProperties',
+            'aggregated-metricsProperties')">
+          <h4>
+            <span class="collapse-table-arrow arrow-closed"></span>
+            <a>Metrics Properties</a>
+          </h4>
+        </span>
+        <div class="aggregated-metricsProperties collapsible-table collapsed">
+          {metricsPropertiesTable}
+        </div>
         <span class="collapse-aggregated-classpathEntries collapse-table"
             onClick="collapseTable('collapse-aggregated-classpathEntries',
             'aggregated-classpathEntries')">
diff --git a/core/src/main/scala/org/apache/spark/util/JsonProtocol.scala b/core/src/main/scala/org/apache/spark/util/JsonProtocol.scala
index 09f1be2076c..ef5a812e4b6 100644
--- a/core/src/main/scala/org/apache/spark/util/JsonProtocol.scala
+++ b/core/src/main/scala/org/apache/spark/util/JsonProtocol.scala
@@ -180,12 +180,14 @@ private[spark] object JsonProtocol {
    val sparkProperties = mapToJson(environmentDetails("Spark Properties").toMap)
    val hadoopProperties = mapToJson(environmentDetails("Hadoop Properties").toMap)
    val systemProperties = mapToJson(environmentDetails("System Properties").toMap)
+    val metricsProperties = mapToJson(environmentDetails("Metrics Properties").toMap)
    val classpathEntries = mapToJson(environmentDetails("Classpath Entries").toMap)
     ("Event" -> SPARK_LISTENER_EVENT_FORMATTED_CLASS_NAMES.environmentUpdate) ~
     ("JVM Information" -> jvmInformation) ~
     ("Spark Properties" -> sparkProperties) ~
     ("Hadoop Properties" -> hadoopProperties) ~
     ("System Properties" -> systemProperties) ~
+    ("Metrics Properties"-> metricsProperties) ~
     ("Classpath Entries" -> classpathEntries)
   }
 
@@ -780,11 +782,14 @@ private[spark] object JsonProtocol {
    // For compatibility with previous event logs
    val hadoopProperties = jsonOption(json \ "Hadoop Properties").map(mapFromJson(_).toSeq)
      .getOrElse(Seq.empty)
+    val metricsProperties = jsonOption(json \ "Metrics Properties").map(mapFromJson(_).toSeq)
+      .getOrElse(Seq.empty)
     val environmentDetails = Map[String, Seq[(String, String)]](
       "JVM Information" -> mapFromJson(json \ "JVM Information").toSeq,
       "Spark Properties" -> mapFromJson(json \ "Spark Properties").toSeq,
       "Hadoop Properties" -> hadoopProperties,
       "System Properties" -> mapFromJson(json \ "System Properties").toSeq,
+      "Metrics Properties" -> metricsProperties,
       "Classpath Entries" -> mapFromJson(json \ "Classpath Entries").toSeq)
     SparkListenerEnvironmentUpdate(environmentDetails)
   }
diff --git a/core/src/test/resources/HistoryServerExpectations/app_environment_expectation.json b/core/src/test/resources/HistoryServerExpectations/app_environment_expectation.json
index c2616129de9..b03416eec94 100644
--- a/core/src/test/resources/HistoryServerExpectations/app_environment_expectation.json
+++ b/core/src/test/resources/HistoryServerExpectations/app_environment_expectation.json
@@ -97,6 +97,12 @@
     [ "user.name", "jose" ],
     [ "user.timezone", "America/Chicago" ]
   ],
+  "metricsProperties" : [
+    [ "*.sink.servlet.class", "org.apache.spark.metrics.sink.MetricsServlet" ],
+    [ "*.sink.servlet.path", "/metrics/json" ],
+    [ "applications.sink.servlet.path", "/metrics/applications/json" ],
+    [ "master.sink.servlet.path", "/metrics/master/json" ]
+  ],
   "classpathEntries" : [
    [ "/Users/Jose/IdeaProjects/spark/assembly/target/scala-2.11/jars/RoaringBitmap-0.5.11.jar", "System Classpath" ],
    [ "/Users/Jose/IdeaProjects/spark/assembly/target/scala-2.11/jars/antlr4-runtime-4.5.3.jar", "System Classpath" ],
diff --git a/core/src/test/resources/HistoryServerExpectations/multiple_resource_profiles_expectation.json b/core/src/test/resources/HistoryServerExpectations/multiple_resource_profiles_expectation.json
index 5c1e4cc2337..6b2c18bb0fa 100644
--- a/core/src/test/resources/HistoryServerExpectations/multiple_resource_profiles_expectation.json
+++ b/core/src/test/resources/HistoryServerExpectations/multiple_resource_profiles_expectation.json
@@ -7,6 +7,7 @@
   "sparkProperties" : [ ],
   "hadoopProperties" : [ ],
   "systemProperties" : [ ],
+  "metricsProperties": [ ],
   "classpathEntries" : [ ],
   "resourceProfiles" : [ {
     "id" : 0,
diff --git a/core/src/test/resources/spark-events/app-20161116163331-0000 b/core/src/test/resources/spark-events/app-20161116163331-0000
index 8f77fdd34f4..3e7565beff8 100755
--- a/core/src/test/resources/spark-events/app-20161116163331-0000
+++ b/core/src/test/resources/spark-events/app-20161116163331-0000
@@ -1,6 +1,6 @@
 {"Event":"SparkListenerLogStart","Spark Version":"2.1.0-SNAPSHOT"}
 {"Event":"SparkListenerBlockManagerAdded","Block Manager ID":{"Executor 
ID":"driver","Host":"172.22.0.167","Port":51475},"Maximum 
Memory":908381388,"Timestamp":1479335611477,"Maximum Onheap 
Memory":384093388,"Maximum Offheap Memory":524288000}
-{"Event":"SparkListenerEnvironmentUpdate","JVM Information":{"Java 
Home":"/Library/Java/JavaVirtualMachines/jdk1.8.0_92.jdk/Contents/Home/jre","Java
 Version":"1.8.0_92 (Oracle Corporation)","Scala Version":"version 
2.11.8"},"Spark 
Properties":{"spark.blacklist.task.maxTaskAttemptsPerExecutor":"3","spark.blacklist.enabled":"TRUE","spark.driver.host":"172.22.0.167","spark.blacklist.task.maxTaskAttemptsPerNode":"3","spark.eventLog.enabled":"TRUE","spark.driver.port":"51459","spark.repl.clas
 [...]
+{"Event":"SparkListenerEnvironmentUpdate","JVM Information":{"Java 
Home":"/Library/Java/JavaVirtualMachines/jdk1.8.0_92.jdk/Contents/Home/jre","Java
 Version":"1.8.0_92 (Oracle Corporation)","Scala Version":"version 
2.11.8"},"Spark 
Properties":{"spark.blacklist.task.maxTaskAttemptsPerExecutor":"3","spark.blacklist.enabled":"TRUE","spark.driver.host":"172.22.0.167","spark.blacklist.task.maxTaskAttemptsPerNode":"3","spark.eventLog.enabled":"TRUE","spark.driver.port":"51459","spark.repl.clas
 [...]
 {"Event":"SparkListenerApplicationStart","App Name":"Spark shell","App 
ID":"app-20161116163331-0000","Timestamp":1479335609916,"User":"jose"}
 {"Event":"SparkListenerExecutorAdded","Timestamp":1479335615320,"Executor 
ID":"3","Executor Info":{"Host":"172.22.0.167","Total Cores":4,"Log 
Urls":{"stdout":"http://172.22.0.167:51466/logPage/?appId=app-20161116163331-0000&executorId=3&logType=stdout","stderr":"http://172.22.0.167:51466/logPage/?appId=app-20161116163331-0000&executorId=3&logType=stderr"}}}
 {"Event":"SparkListenerBlockManagerAdded","Block Manager ID":{"Executor 
ID":"3","Host":"172.22.0.167","Port":51485},"Maximum 
Memory":908381388,"Timestamp":1479335615387,"Maximum Onheap 
Memory":384093388,"Maximum Offheap Memory":524288000}
diff --git a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
index 2d802b81763..78abbe93b79 100644
--- a/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
+++ b/core/src/test/scala/org/apache/spark/deploy/history/FsHistoryProviderSuite.scala
@@ -829,6 +829,7 @@ abstract class FsHistoryProviderSuite extends SparkFunSuite with Matchers with L
             "Hadoop Properties" -> Seq.empty,
             "JVM Information" -> Seq.empty,
             "System Properties" -> Seq.empty,
+            "Metrics Properties" -> Seq.empty,
             "Classpath Entries" -> Seq.empty
           )),
           SparkListenerApplicationEnd(System.currentTimeMillis()))
@@ -1088,6 +1089,7 @@ abstract class FsHistoryProviderSuite extends SparkFunSuite with Matchers with L
           "Hadoop Properties" -> Seq.empty,
           "JVM Information" -> Seq.empty,
           "System Properties" -> Seq.empty,
+          "Metrics Properties" -> Seq.empty,
           "Classpath Entries" -> Seq.empty
         )),
         SparkListenerApplicationEnd(5L)
@@ -1573,6 +1575,7 @@ abstract class FsHistoryProviderSuite extends SparkFunSuite with Matchers with L
         "Hadoop Properties" -> Seq.empty,
         "JVM Information" -> Seq.empty,
         "System Properties" -> Seq.empty,
+        "Metrics Properties" -> Seq.empty,
         "Classpath Entries" -> Seq.empty
       )),
       SparkListenerApplicationEnd(System.currentTimeMillis()))
diff --git a/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala
index d7cd63a7c21..d790c35a330 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala
@@ -91,7 +91,7 @@ class EventLoggingListenerSuite extends SparkFunSuite with LocalSparkContext wit
       .set(key, secretPassword)
     val hadoopconf = SparkHadoopUtil.get.newConfiguration(new SparkConf())
     val envDetails = SparkEnv.environmentDetails(
-      conf, hadoopconf, "FIFO", Seq.empty, Seq.empty, Seq.empty)
+      conf, hadoopconf, "FIFO", Seq.empty, Seq.empty, Seq.empty, Map.empty)
     val event = SparkListenerEnvironmentUpdate(envDetails)
     val redactedProps = EventLoggingListener
       .redactEvent(conf, event).environmentDetails("Spark Properties").toMap
diff --git a/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala b/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala
index 5800dbda297..572ac6fc8bb 100644
--- a/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/JsonProtocolSuite.scala
@@ -78,6 +78,8 @@ class JsonProtocolSuite extends SparkFunSuite {
       "Spark Properties" -> Seq(("Job throughput", "80000 jobs/s, regardless 
of job type")),
       "Hadoop Properties" -> Seq(("hadoop.tmp.dir", "/usr/local/hadoop/tmp")),
       "System Properties" -> Seq(("Username", "guest"), ("Password", "guest")),
+      "Metrics Properties" ->
+        Seq(("*.sink.servlet.class", 
"org.apache.spark.metrics.sink.MetricsServlet")),
       "Classpath Entries" -> Seq(("Super library", "/tmp/super_library"))
     ))
     val blockManagerAdded = SparkListenerBlockManagerAdded(1L,
@@ -2048,6 +2050,9 @@ private[spark] object JsonProtocolSuite extends Assertions {
       |    "Username": "guest",
       |    "Password": "guest"
       |  },
+      |  "Metrics Properties": {
+      |    "*.sink.servlet.class": 
"org.apache.spark.metrics.sink.MetricsServlet"
+      |  },
       |  "Classpath Entries": {
       |    "Super library": "/tmp/super_library"
       |  }
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index f0010dfcd83..927384d4f1e 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -37,7 +37,10 @@ object MimaExcludes {
   // Exclude rules for 3.4.x
  lazy val v34excludes = v33excludes ++ Seq(
    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.recommendation.ALS.checkedCast"),
-    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.recommendation.ALSModel.checkedCast")
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.ml.recommendation.ALSModel.checkedCast"),
+
+    // [SPARK-39110] Show metrics properties in HistoryServer environment tab
+    ProblemFilters.exclude[DirectMissingMethodProblem]("org.apache.spark.status.api.v1.ApplicationEnvironmentInfo.this")
   )
 
   // Exclude rules for 3.3.x from 3.2.0

