Repository: spark
Updated Branches:
  refs/heads/master 9b88e1dca -> 08a7a836c


[SPARK-10565][CORE] add missing web UI stats to /api/v1/applications JSON

I looked at the other endpoints, and they don't seem to be missing any fields.
Added fields:
![image](https://cloud.githubusercontent.com/assets/613879/10948801/58159982-82e4-11e5-86dc-62da201af910.png)

Author: Charles Yeh <charles...@dropbox.com>

Closes #9472 from CharlesYeh/api_vars.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/08a7a836
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/08a7a836
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/08a7a836

Branch: refs/heads/master
Commit: 08a7a836c393d6a62b9b216eeb01fad0b90b6c52
Parents: 9b88e1d
Author: Charles Yeh <charles...@dropbox.com>
Authored: Mon Nov 9 11:59:32 2015 -0600
Committer: Imran Rashid <iras...@cloudera.com>
Committed: Mon Nov 9 11:59:32 2015 -0600

----------------------------------------------------------------------
 .../spark/deploy/master/ui/MasterWebUI.scala    |  7 +-
 .../status/api/v1/ApplicationListResource.scala |  8 ++
 .../org/apache/spark/status/api/v1/api.scala    |  4 +
 .../scala/org/apache/spark/ui/SparkUI.scala     |  4 +
 .../deploy/master/ui/MasterWebUISuite.scala     | 90 ++++++++++++++++++++
 project/MimaExcludes.scala                      |  3 +
 6 files changed, 114 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/08a7a836/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
----------------------------------------------------------------------
diff --git 
a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala 
b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
index 6174fc1..e41554a 100644
--- a/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/master/ui/MasterWebUI.scala
@@ -28,14 +28,17 @@ import org.apache.spark.ui.JettyUtils._
  * Web UI server for the standalone master.
  */
 private[master]
-class MasterWebUI(val master: Master, requestedPort: Int)
+class MasterWebUI(
+    val master: Master,
+    requestedPort: Int,
+    customMasterPage: Option[MasterPage] = None)
   extends WebUI(master.securityMgr, requestedPort, master.conf, name = 
"MasterUI") with Logging
   with UIRoot {
 
   val masterEndpointRef = master.self
   val killEnabled = master.conf.getBoolean("spark.ui.killEnabled", true)
 
-  val masterPage = new MasterPage(this)
+  val masterPage = customMasterPage.getOrElse(new MasterPage(this))
 
   initialize()
 

http://git-wip-us.apache.org/repos/asf/spark/blob/08a7a836/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
----------------------------------------------------------------------
diff --git 
a/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
 
b/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
index 17b521f..0fc0fb5 100644
--- 
a/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
+++ 
b/core/src/main/scala/org/apache/spark/status/api/v1/ApplicationListResource.scala
@@ -62,6 +62,10 @@ private[spark] object ApplicationsListResource {
     new ApplicationInfo(
       id = app.id,
       name = app.name,
+      coresGranted = None,
+      maxCores = None,
+      coresPerExecutor = None,
+      memoryPerExecutorMB = None,
       attempts = app.attempts.map { internalAttemptInfo =>
         new ApplicationAttemptInfo(
           attemptId = internalAttemptInfo.attemptId,
@@ -81,6 +85,10 @@ private[spark] object ApplicationsListResource {
     new ApplicationInfo(
       id = internal.id,
       name = internal.desc.name,
+      coresGranted = Some(internal.coresGranted),
+      maxCores = internal.desc.maxCores,
+      coresPerExecutor = internal.desc.coresPerExecutor,
+      memoryPerExecutorMB = Some(internal.desc.memoryPerExecutorMB),
       attempts = Seq(new ApplicationAttemptInfo(
         attemptId = None,
         startTime = new Date(internal.startTime),

http://git-wip-us.apache.org/repos/asf/spark/blob/08a7a836/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/status/api/v1/api.scala 
b/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
index 2bec64f..baddfc5 100644
--- a/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
+++ b/core/src/main/scala/org/apache/spark/status/api/v1/api.scala
@@ -25,6 +25,10 @@ import org.apache.spark.JobExecutionStatus
 class ApplicationInfo private[spark](
     val id: String,
     val name: String,
+    val coresGranted: Option[Int],
+    val maxCores: Option[Int],
+    val coresPerExecutor: Option[Int],
+    val memoryPerExecutorMB: Option[Int],
     val attempts: Seq[ApplicationAttemptInfo])
 
 class ApplicationAttemptInfo private[spark](

http://git-wip-us.apache.org/repos/asf/spark/blob/08a7a836/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala 
b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
index 99085ad..4608bce 100644
--- a/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
+++ b/core/src/main/scala/org/apache/spark/ui/SparkUI.scala
@@ -102,6 +102,10 @@ private[spark] class SparkUI private (
     Iterator(new ApplicationInfo(
       id = appId,
       name = appName,
+      coresGranted = None,
+      maxCores = None,
+      coresPerExecutor = None,
+      memoryPerExecutorMB = None,
       attempts = Seq(new ApplicationAttemptInfo(
         attemptId = None,
         startTime = new Date(startTime),

http://git-wip-us.apache.org/repos/asf/spark/blob/08a7a836/core/src/test/scala/org/apache/spark/deploy/master/ui/MasterWebUISuite.scala
----------------------------------------------------------------------
diff --git 
a/core/src/test/scala/org/apache/spark/deploy/master/ui/MasterWebUISuite.scala 
b/core/src/test/scala/org/apache/spark/deploy/master/ui/MasterWebUISuite.scala
new file mode 100644
index 0000000..fba835f
--- /dev/null
+++ 
b/core/src/test/scala/org/apache/spark/deploy/master/ui/MasterWebUISuite.scala
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.deploy.master.ui
+
+import java.util.Date
+
+import scala.io.Source
+import scala.language.postfixOps
+
+import org.json4s.jackson.JsonMethods._
+import org.json4s.JsonAST.{JNothing, JString, JInt}
+import org.mockito.Mockito.{mock, when}
+import org.scalatest.BeforeAndAfter
+
+import org.apache.spark.{SparkConf, SecurityManager, SparkFunSuite}
+import org.apache.spark.deploy.DeployMessages.MasterStateResponse
+import org.apache.spark.deploy.DeployTestUtils._
+import org.apache.spark.deploy.master._
+import org.apache.spark.rpc.RpcEnv
+
+
+class MasterWebUISuite extends SparkFunSuite with BeforeAndAfter {
+
+  val masterPage = mock(classOf[MasterPage])
+  val master = {
+    val conf = new SparkConf
+    val securityMgr = new SecurityManager(conf)
+    val rpcEnv = RpcEnv.create(Master.SYSTEM_NAME, "localhost", 0, conf, 
securityMgr)
+    val master = new Master(rpcEnv, rpcEnv.address, 0, securityMgr, conf)
+    master
+  }
+  val masterWebUI = new MasterWebUI(master, 0, customMasterPage = 
Some(masterPage))
+
+  before {
+    masterWebUI.bind()
+  }
+
+  after {
+    masterWebUI.stop()
+  }
+
+  test("list applications") {
+    val worker = createWorkerInfo()
+    val appDesc = createAppDesc()
+    // use new start date so it isn't filtered by UI
+    val activeApp = new ApplicationInfo(
+      new Date().getTime, "id", appDesc, new Date(), null, Int.MaxValue)
+    activeApp.addExecutor(worker, 2)
+
+    val workers = Array[WorkerInfo](worker)
+    val activeApps = Array(activeApp)
+    val completedApps = Array[ApplicationInfo]()
+    val activeDrivers = Array[DriverInfo]()
+    val completedDrivers = Array[DriverInfo]()
+    val stateResponse = new MasterStateResponse(
+      "host", 8080, None, workers, activeApps, completedApps,
+      activeDrivers, completedDrivers, RecoveryState.ALIVE)
+
+    when(masterPage.getMasterState).thenReturn(stateResponse)
+
+    val resultJson = Source.fromURL(
+      s"http://localhost:${masterWebUI.boundPort}/api/v1/applications")
+      .mkString
+    val parsedJson = parse(resultJson)
+    val firstApp = parsedJson(0)
+
+    assert(firstApp \ "id" === JString(activeApp.id))
+    assert(firstApp \ "name" === JString(activeApp.desc.name))
+    assert(firstApp \ "coresGranted" === JInt(2))
+    assert(firstApp \ "maxCores" === JInt(4))
+    assert(firstApp \ "memoryPerExecutorMB" === JInt(1234))
+    assert(firstApp \ "coresPerExecutor" === JNothing)
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/08a7a836/project/MimaExcludes.scala
----------------------------------------------------------------------
diff --git a/project/MimaExcludes.scala b/project/MimaExcludes.scala
index dacef91..5022079 100644
--- a/project/MimaExcludes.scala
+++ b/project/MimaExcludes.scala
@@ -134,6 +134,9 @@ object MimaExcludes {
         
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.PostgresDialect.toString"),
         
ProblemFilters.exclude[MissingMethodProblem]("org.apache.spark.sql.jdbc.PostgresDialect.hashCode"),
         
ProblemFilters.exclude[MissingTypesProblem]("org.apache.spark.sql.jdbc.NoopDialect$")
+      ) ++ Seq (
+        ProblemFilters.exclude[MissingMethodProblem](
+          "org.apache.spark.status.api.v1.ApplicationInfo.this")
       )
     case v if v.startsWith("1.5") =>
       Seq(


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to