Repository: spark
Updated Branches:
  refs/heads/master 584c7f143 -> 2387f1e31


[SPARK-21675][WEBUI] Add a navigation bar at the bottom of the Details for 
Stage Page

## What changes were proposed in this pull request?

1. In the Spark Web UI, the Details for Stage page has no navigation bar at 
the bottom. After scrolling to the bottom of a long task table, it is more 
convenient to have a navigation bar right there so we can jump to any page 
we want (a minimal sketch of the idea follows this list).
2. Executor ID is not equivalent to Host, so it is better to separate them 
into two columns; the tasks can then be sorted or grouped by Host.
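
A minimal sketch of the first point (illustrative names only, not the actual 
Spark classes; assumes the scala-xml library on the classpath): render the 
pagination element once and emit it both above and below the table, so the 
two bars can never get out of sync.

```scala
import scala.xml.{Elem, Node}

// Illustrative sketch: reuse a single rendered navigation element so the
// top and bottom navigation bars stay identical.
def renderPagedTable(rows: Seq[Node], pageNav: Elem): Elem =
  <div>
    {pageNav}
    <table class="table table-bordered">
      <tbody>{rows}</tbody>
    </table>
    {pageNav}
  </div>
```

This mirrors how the diff below hoists `pageNavigation(...)` into a `pageNavi` 
val and renders it twice.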

## How was this patch tested?
Manually tested.
![wx20170809-165606](https://user-images.githubusercontent.com/8326978/29114161-f82b4920-7d25-11e7-8d0c-0c036b008a78.png)

Please review http://spark.apache.org/contributing.html before opening a pull 
request.

Author: Kent Yao <yaooq...@hotmail.com>

Closes #18893 from yaooqinn/SPARK-21675.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/2387f1e3
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/2387f1e3
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/2387f1e3

Branch: refs/heads/master
Commit: 2387f1e316a90fc9a392ab69ee3c2257b622af4d
Parents: 584c7f1
Author: Kent Yao <yaooq...@hotmail.com>
Authored: Fri Aug 11 14:57:06 2017 +0100
Committer: Sean Owen <so...@cloudera.com>
Committed: Fri Aug 11 14:57:06 2017 +0100

----------------------------------------------------------------------
 .../main/scala/org/apache/spark/ui/PagedTable.scala   |  4 +++-
 .../scala/org/apache/spark/ui/jobs/StagePage.scala    | 14 +++++++++-----
 2 files changed, 12 insertions(+), 6 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/2387f1e3/core/src/main/scala/org/apache/spark/ui/PagedTable.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/PagedTable.scala b/core/src/main/scala/org/apache/spark/ui/PagedTable.scala
index 79974df..65fa383 100644
--- a/core/src/main/scala/org/apache/spark/ui/PagedTable.scala
+++ b/core/src/main/scala/org/apache/spark/ui/PagedTable.scala
@@ -94,14 +94,16 @@ private[ui] trait PagedTable[T] {
     val _dataSource = dataSource
     try {
       val PageData(totalPages, data) = _dataSource.pageData(page)
+      val pageNavi = pageNavigation(page, _dataSource.pageSize, totalPages)
       <div>
-        {pageNavigation(page, _dataSource.pageSize, totalPages)}
+        {pageNavi}
         <table class={tableCssClass} id={tableId}>
           {headers}
           <tbody>
             {data.map(row)}
           </tbody>
         </table>
+        {pageNavi}
       </div>
     } catch {
       case e: IndexOutOfBoundsException =>

http://git-wip-us.apache.org/repos/asf/spark/blob/2387f1e3/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
index 8ed5174..633e740 100644
--- a/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
+++ b/core/src/main/scala/org/apache/spark/ui/jobs/StagePage.scala
@@ -835,7 +835,8 @@ private[ui] class TaskTableRowData(
     val speculative: Boolean,
     val status: String,
     val taskLocality: String,
-    val executorIdAndHost: String,
+    val executorId: String,
+    val host: String,
     val launchTime: Long,
     val duration: Long,
     val formatDuration: String,
@@ -1017,7 +1018,8 @@ private[ui] class TaskDataSource(
       info.speculative,
       info.status,
       info.taskLocality.toString,
-      s"${info.executorId} / ${info.host}",
+      info.executorId,
+      info.host,
       info.launchTime,
       duration,
       formatDuration,
@@ -1047,7 +1049,8 @@ private[ui] class TaskDataSource(
       case "Attempt" => Ordering.by(_.attempt)
       case "Status" => Ordering.by(_.status)
       case "Locality Level" => Ordering.by(_.taskLocality)
-      case "Executor ID / Host" => Ordering.by(_.executorIdAndHost)
+      case "Executor ID" => Ordering.by(_.executorId)
+      case "Host" => Ordering.by(_.host)
       case "Launch Time" => Ordering.by(_.launchTime)
       case "Duration" => Ordering.by(_.duration)
       case "Scheduler Delay" => Ordering.by(_.schedulerDelay)
@@ -1200,7 +1203,7 @@ private[ui] class TaskPagedTable(
     val taskHeadersAndCssClasses: Seq[(String, String)] =
       Seq(
         ("Index", ""), ("ID", ""), ("Attempt", ""), ("Status", ""), ("Locality 
Level", ""),
-        ("Executor ID / Host", ""), ("Launch Time", ""), ("Duration", ""),
+        ("Executor ID", ""), ("Host", ""), ("Launch Time", ""), ("Duration", 
""),
         ("Scheduler Delay", TaskDetailsClassNames.SCHEDULER_DELAY),
         ("Task Deserialization Time", 
TaskDetailsClassNames.TASK_DESERIALIZATION_TIME),
         ("GC Time", ""),
@@ -1271,8 +1274,9 @@ private[ui] class TaskPagedTable(
       <td>{if (task.speculative) s"${task.attempt} (speculative)" else task.attempt.toString}</td>
       <td>{task.status}</td>
       <td>{task.taskLocality}</td>
+      <td>{task.executorId}</td>
       <td>
-        <div style="float: left">{task.executorIdAndHost}</div>
+        <div style="float: left">{task.host}</div>
         <div style="float: right">
         {
           task.logs.map {

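A brief usage note on the second change (hypothetical row type for 
illustration; not Spark's actual `TaskTableRowData`): with the executor ID 
and host kept in separate fields, as in the diff above, tasks can be sorted 
or grouped by host without any string parsing.

```scala
// Hypothetical row type, for illustration only.
case class TaskRow(executorId: String, host: String, duration: Long)

// Order tasks by host, mirroring the new "Host" sort key added above.
val byHost: Ordering[TaskRow] = Ordering.by(_.host)

// Group tasks by host, which the combined "Executor ID / Host" column made awkward.
def groupTasksByHost(rows: Seq[TaskRow]): Map[String, Seq[TaskRow]] =
  rows.groupBy(_.host)
```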
