Github user tgravescs commented on a diff in the pull request:

    https://github.com/apache/spark/pull/4688#discussion_r25538854

--- Diff: yarn/src/main/scala/org/apache/spark/deploy/yarn/YarnSparkHadoopUtil.scala ---
@@ -82,6 +93,102 @@ class YarnSparkHadoopUtil extends SparkHadoopUtil {
     if (credentials != null) credentials.getSecretKey(new Text(key)) else null
   }
+  override def setPrincipalAndKeytabForLogin(principal: String, keytab: String): Unit = {
+    loginPrincipal = Option(principal)
+    keytabFile = Option(keytab)
+  }
+
+  private[spark] override def scheduleLoginFromKeytab(
+      callback: (SerializableBuffer) => Unit): Unit = {
+
+    loginPrincipal match {
+      case Some(principal) =>
+        val keytab = keytabFile.get
+        val remoteFs = FileSystem.get(conf)
+        val remoteKeytabPath = new Path(
+          remoteFs.getHomeDirectory, System.getenv("SPARK_STAGING_DIR") + Path.SEPARATOR + keytab)
+        val localFS = FileSystem.getLocal(conf)
+        // At this point, SparkEnv is likely not initialized yet, so create a temp dir and put the keytab there.
+        val tempDir = Utils.createTempDir()
+        val localURI = new URI(tempDir.getAbsolutePath + Path.SEPARATOR + keytab)
+        val qualifiedURI = new URI(localFS.makeQualified(new Path(localURI)).toString)
+        FileUtil.copy(
--- End diff --

Yeah, I was saying to use the distributed cache to get the keytab to the AM. The distributed cache code is in Client.prepareLocalResources, which populates the resources that go to the AM; then for each executor it's handled in ExecutorRunnable.prepareLocalResources. You would need some mechanism, or a new config, that says to only send this resource to the AM and not on to the executors. We used to do that for jars when we used the Spark addJar distribution mechanism.
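
As a rough sketch of the distributed-cache suggestion above (a hypothetical illustration, not existing Spark code: the helper name addKeytabAsLocalResource, its parameters, and the "AM only" guard are assumptions), Client.prepareLocalResources could register the keytab as a YARN LocalResource so that YARN localizes it into the AM container, while ExecutorRunnable.prepareLocalResources simply skips the entry:

    // Hypothetical sketch of shipping the keytab via the YARN distributed cache.
    // The helper name and the idea of gating it behind a new config are assumptions;
    // only the YARN LocalResource API calls themselves are real.
    import org.apache.hadoop.fs.{FileSystem, Path}
    import org.apache.hadoop.yarn.api.records.{LocalResource, LocalResourceType, LocalResourceVisibility}
    import org.apache.hadoop.yarn.util.{ConverterUtils, Records}

    import scala.collection.mutable

    def addKeytabAsLocalResource(
        fs: FileSystem,
        stagingDir: Path,
        keytabFileName: String,
        localResources: mutable.Map[String, LocalResource]): Unit = {
      // Assumes the keytab has already been uploaded to the application's staging dir.
      val destPath = new Path(stagingDir, keytabFileName)
      val destStatus = fs.getFileStatus(destPath)
      val resource = Records.newRecord(classOf[LocalResource])
      resource.setType(LocalResourceType.FILE)
      // PRIVATE visibility keeps the localized copy per-user, which suits a credential file.
      resource.setVisibility(LocalResourceVisibility.PRIVATE)
      resource.setResource(ConverterUtils.getYarnUrlFromPath(destPath))
      resource.setTimestamp(destStatus.getModificationTime)
      resource.setSize(destStatus.getLen)
      // Every entry in this map is localized into the AM container's working directory.
      // ExecutorRunnable.prepareLocalResources would simply not add this entry (e.g. behind
      // a new config flag), so the keytab is never shipped on to the executors.
      localResources(keytabFileName) = resource
    }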