Repository: spark
Updated Branches:
  refs/heads/master 0d3714d22 -> f53818d35


[SPARK-24506][UI] Add UI filters to tabs added after binding

## What changes were proposed in this pull request?

Currently, the filters configured via `spark.ui.filters` are not applied to handlers added after the server has been bound. This means that any page added after the UI starts does not have the filters configured on it, which can allow unauthorized access to those pages.

The PR applies the filters to handlers added after the UI starts as well.
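At the Jetty level, installing one of these filters on a handler amounts to wrapping the filter class in a `FilterHolder` and registering it on the `ServletContextHandler`, roughly as in this simplified sketch (illustrative only, not the exact code in `JettyUtils`):

```scala
import java.util.EnumSet
import javax.servlet.DispatcherType
import org.eclipse.jetty.servlet.{FilterHolder, ServletContextHandler}

// Illustrative helper: install a filter class on a handler so every request
// routed through that handler passes the filter first.
def installFilter(handler: ServletContextHandler, filterClass: String): Unit = {
  val holder = new FilterHolder()
  holder.setClassName(filterClass)
  handler.addFilter(holder, "/*", EnumSet.allOf(classOf[DispatcherType]))
}
```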

## How was this patch tested?

Manual tests. Without the patch, after starting the thriftserver with `--conf 
spark.ui.filters=org.apache.hadoop.security.authentication.server.AuthenticationFilter --conf spark.org.apache.hadoop.security.authentication.server.AuthenticationFilter.params="type=simple"`, 
you can access `http://localhost:4040/sqlserver` without authentication; with the patch, the request gets a 401 response, as for the other pages.
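
For reference, the parameters in `spark.<filter class>.params` are a comma-separated list of `key=value` pairs (e.g. `type=simple` above). A minimal sketch of turning such a string into init parameters on a `FilterHolder` (an illustration under that assumption, not the exact parsing in `JettyUtils`):

```scala
import org.eclipse.jetty.servlet.FilterHolder

// Illustrative: split "type=simple,cookie.domain=example.com" into key/value
// pairs and set them as init parameters on the filter holder.
def setFilterParams(holder: FilterHolder, params: String): Unit = {
  params.split(',').map(_.trim).filter(_.nonEmpty).foreach { kv =>
    kv.split('=') match {
      case Array(k, v) => holder.setInitParameter(k, v)
      case _ => // ignore malformed entries in this sketch
    }
  }
}
```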

Author: Marco Gaido <marcogaid...@gmail.com>

Closes #21523 from mgaido91/SPARK-24506.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/f53818d3
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/f53818d3
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/f53818d3

Branch: refs/heads/master
Commit: f53818d35bdef5d20a2718b14a2fed4c468545c6
Parents: 0d3714d
Author: Marco Gaido <marcogaid...@gmail.com>
Authored: Tue Jun 12 16:42:44 2018 -0700
Committer: Marcelo Vanzin <van...@cloudera.com>
Committed: Tue Jun 12 16:42:44 2018 -0700

----------------------------------------------------------------------
 .../org/apache/spark/deploy/history/HistoryServer.scala      | 1 -
 core/src/main/scala/org/apache/spark/ui/JettyUtils.scala     | 8 +++++---
 2 files changed, 5 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/f53818d3/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
index a9a4d5a..066275e 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
@@ -152,7 +152,6 @@ class HistoryServer(
     assert(serverInfo.isDefined, "HistoryServer must be bound before attaching SparkUIs")
     handlers.synchronized {
       ui.getHandlers.foreach(attachHandler)
-      addFilters(ui.getHandlers, conf)
     }
   }
 

http://git-wip-us.apache.org/repos/asf/spark/blob/f53818d3/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index d6a025a..52a9551 100644
--- a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -263,7 +263,7 @@ private[spark] object JettyUtils extends Logging {
     filters.foreach {
       case filter : String =>
         if (!filter.isEmpty) {
-          logInfo("Adding filter: " + filter)
+          logInfo(s"Adding filter $filter to 
${handlers.map(_.getContextPath).mkString(", ")}.")
           val holder : FilterHolder = new FilterHolder()
           holder.setClassName(filter)
           // Get any parameters for each filter
@@ -407,7 +407,7 @@ private[spark] object JettyUtils extends Logging {
       }
 
       pool.setMaxThreads(math.max(pool.getMaxThreads, minThreads))
-      ServerInfo(server, httpPort, securePort, collection)
+      ServerInfo(server, httpPort, securePort, conf, collection)
     } catch {
       case e: Exception =>
         server.stop()
@@ -507,10 +507,12 @@ private[spark] case class ServerInfo(
     server: Server,
     boundPort: Int,
     securePort: Option[Int],
+    conf: SparkConf,
     private val rootHandler: ContextHandlerCollection) {
 
-  def addHandler(handler: ContextHandler): Unit = {
+  def addHandler(handler: ServletContextHandler): Unit = {
     handler.setVirtualHosts(JettyUtils.toVirtualHosts(JettyUtils.SPARK_CONNECTOR_NAME))
+    JettyUtils.addFilters(Seq(handler), conf)
     rootHandler.addHandler(handler)
     if (!handler.isStarted()) {
       handler.start()

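With the change in place, a handler attached to an already-bound UI goes through the same filter installation as the handlers registered at startup. A rough sketch of the call from the caller's side (`sqlServerHandler` is a hypothetical handler, e.g. for the Thrift Server pages in the manual test above):

```scala
import org.eclipse.jetty.servlet.ServletContextHandler

// Illustrative only: attach a page to a UI whose server is already bound.
def attachAfterBind(serverInfo: ServerInfo, sqlServerHandler: ServletContextHandler): Unit = {
  // With this patch, addHandler runs JettyUtils.addFilters(Seq(handler), conf)
  // before wiring the handler in, so spark.ui.filters protect the new page too.
  serverInfo.addHandler(sqlServerHandler)
}
```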

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to