Repository: spark
Updated Branches:
  refs/heads/branch-2.2 bf0b21298 -> 1f81ade0c


[SPARK-24506][UI] Add UI filters to tabs added after binding

Currently, `spark.ui.filters` are not applied to handlers added after the server 
is bound. This means that any page added after the UI starts does not have the 
configured filters applied to it, which can allow unauthorized access to those 
pages.

The PR applies the filters to the handlers added after the UI starts as well.
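
As a hedged illustration (the `applyUiFilters` helper below is hypothetical; the 
actual change lives in `JettyUtils.addFilters` and `ServerInfo.addHandler`, shown 
in the diff), registering the configured filter classes on a handler that is 
attached after binding looks roughly like this in Scala:

    import java.util.EnumSet
    import javax.servlet.DispatcherType
    import org.eclipse.jetty.servlet.{FilterHolder, ServletContextHandler}

    // Hypothetical helper: apply each configured filter class to a handler
    // that is attached after the server has already been bound.
    def applyUiFilters(handler: ServletContextHandler, filterClasses: Seq[String]): Unit = {
      filterClasses.filter(_.nonEmpty).foreach { filterClass =>
        val holder = new FilterHolder()
        holder.setClassName(filterClass)
        // Register the filter for every request path served by this context.
        handler.addFilter(holder, "/*", EnumSet.allOf(classOf[DispatcherType]))
      }
    }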

Manual tests: without the patch, starting the thriftserver with `--conf 
spark.ui.filters=org.apache.hadoop.security.authentication.server.AuthenticationFilter 
--conf 
spark.org.apache.hadoop.security.authentication.server.AuthenticationFilter.params="type=simple"` 
lets you access `http://localhost:4040/sqlserver` without authenticating; with 
the patch, the response is 401, as it is for the other pages.
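
The same filter configuration, as a sketch in Scala via `SparkConf` rather than 
command-line flags (property names as above; this snippet is illustrative, not 
part of the patch):

    import org.apache.spark.SparkConf

    // Programmatic equivalent of the --conf flags used in the manual test.
    val conf = new SparkConf()
      .set("spark.ui.filters",
        "org.apache.hadoop.security.authentication.server.AuthenticationFilter")
      .set("spark.org.apache.hadoop.security.authentication.server." +
        "AuthenticationFilter.params", "type=simple")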

Author: Marco Gaido <marcogaid...@gmail.com>

Closes #21523 from mgaido91/SPARK-24506.

(cherry picked from commit f53818d35bdef5d20a2718b14a2fed4c468545c6)
Signed-off-by: Marcelo Vanzin <van...@cloudera.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/1f81ade0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/1f81ade0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/1f81ade0

Branch: refs/heads/branch-2.2
Commit: 1f81ade0c7e232f25b39fc6157f63ea91722c829
Parents: bf0b212
Author: Marco Gaido <marcogaid...@gmail.com>
Authored: Tue Jun 12 16:42:44 2018 -0700
Committer: Marcelo Vanzin <van...@cloudera.com>
Committed: Tue Jun 12 16:43:53 2018 -0700

----------------------------------------------------------------------
 .../org/apache/spark/deploy/history/HistoryServer.scala      | 1 -
 core/src/main/scala/org/apache/spark/ui/JettyUtils.scala     | 8 +++++---
 2 files changed, 5 insertions(+), 4 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/1f81ade0/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
index d9c8fda..967cf14 100644
--- a/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
+++ b/core/src/main/scala/org/apache/spark/deploy/history/HistoryServer.scala
@@ -151,7 +151,6 @@ class HistoryServer(
       completed: Boolean) {
     assert(serverInfo.isDefined, "HistoryServer must be bound before attaching SparkUIs")
     ui.getHandlers.foreach(attachHandler)
-    addFilters(ui.getHandlers, conf)
   }
 
   /** Detach a reconstructed UI from this server. Only valid after bind(). */

http://git-wip-us.apache.org/repos/asf/spark/blob/1f81ade0/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
----------------------------------------------------------------------
diff --git a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
index 7df1de5..3e0b62d 100644
--- a/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
+++ b/core/src/main/scala/org/apache/spark/ui/JettyUtils.scala
@@ -250,7 +250,7 @@ private[spark] object JettyUtils extends Logging {
     filters.foreach {
       case filter : String =>
         if (!filter.isEmpty) {
-          logInfo("Adding filter: " + filter)
+          logInfo(s"Adding filter $filter to ${handlers.map(_.getContextPath).mkString(", ")}.")
           val holder : FilterHolder = new FilterHolder()
           holder.setClassName(filter)
           // Get any parameters for each filter
@@ -393,7 +393,7 @@ private[spark] object JettyUtils extends Logging {
       }
 
       pool.setMaxThreads(math.max(pool.getMaxThreads, minThreads))
-      ServerInfo(server, httpPort, securePort, collection)
+      ServerInfo(server, httpPort, securePort, conf, collection)
     } catch {
       case e: Exception =>
         server.stop()
@@ -492,10 +492,12 @@ private[spark] case class ServerInfo(
     server: Server,
     boundPort: Int,
     securePort: Option[Int],
+    conf: SparkConf,
     private val rootHandler: ContextHandlerCollection) {
 
-  def addHandler(handler: ContextHandler): Unit = {
+  def addHandler(handler: ServletContextHandler): Unit = {
     handler.setVirtualHosts(JettyUtils.toVirtualHosts(JettyUtils.SPARK_CONNECTOR_NAME))
+    JettyUtils.addFilters(Seq(handler), conf)
     rootHandler.addHandler(handler)
     if (!handler.isStarted()) {
       handler.start()

