Devaraj K created SPARK-26650:
---------------------------------

             Summary: Yarn Client throws 'ClassNotFoundException: org.apache.hadoop.hbase.HBaseConfiguration'
                 Key: SPARK-26650
                 URL: https://issues.apache.org/jira/browse/SPARK-26650
             Project: Spark
          Issue Type: Bug
          Components: Build, YARN
    Affects Versions: 3.0.0
            Reporter: Devaraj K
{code}
19/01/17 11:33:00 WARN security.HBaseDelegationTokenProvider: Fail to invoke HBaseConfiguration
java.lang.ClassNotFoundException: org.apache.hadoop.hbase.HBaseConfiguration
	at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
	at org.apache.spark.deploy.security.HBaseDelegationTokenProvider.hbaseConf(HBaseDelegationTokenProvider.scala:69)
	at org.apache.spark.deploy.security.HBaseDelegationTokenProvider.delegationTokensRequired(HBaseDelegationTokenProvider.scala:62)
	at org.apache.spark.deploy.security.HadoopDelegationTokenManager.$anonfun$obtainDelegationTokens$1(HadoopDelegationTokenManager.scala:134)
	at scala.collection.TraversableLike.$anonfun$flatMap$1(TraversableLike.scala:244)
	at scala.collection.Iterator.foreach(Iterator.scala:941)
	at scala.collection.Iterator.foreach$(Iterator.scala:941)
	at scala.collection.AbstractIterator.foreach(Iterator.scala:1429)
	at scala.collection.MapLike$DefaultValuesIterable.foreach(MapLike.scala:213)
	at scala.collection.TraversableLike.flatMap(TraversableLike.scala:244)
	at scala.collection.TraversableLike.flatMap$(TraversableLike.scala:241)
	at scala.collection.AbstractTraversable.flatMap(Traversable.scala:108)
	at org.apache.spark.deploy.security.HadoopDelegationTokenManager.obtainDelegationTokens(HadoopDelegationTokenManager.scala:133)
	at org.apache.spark.deploy.yarn.security.YARNHadoopDelegationTokenManager.obtainDelegationTokens(YARNHadoopDelegationTokenManager.scala:59)
	at org.apache.spark.deploy.yarn.Client.setupSecurityToken(Client.scala:305)
	at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:1014)
	at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:181)
	at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:58)
	at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:184)
	at org.apache.spark.SparkContext.<init>(SparkContext.scala:509)
	at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2466)
	at org.apache.spark.sql.SparkSession$Builder.$anonfun$getOrCreate$5(SparkSession.scala:948)
	at scala.Option.getOrElse(Option.scala:138)
	at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:939)
	at org.apache.spark.examples.SparkPi$.main(SparkPi.scala:30)
	at org.apache.spark.examples.SparkPi.main(SparkPi.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
	at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:853)
	at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:168)
	at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:196)
	at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:87)
	at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:932)
	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:941)
	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
19/01/17 11:33:00 INFO yarn.Client: Submitting application application_1544212645385_0197 to ResourceManager
19/01/17 11:33:00 INFO impl.YarnClientImpl: Submitted application application_1544212645385_0197
{code}
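For context, the warning is raised inside HBaseDelegationTokenProvider.hbaseConf, which loads org.apache.hadoop.hbase.HBaseConfiguration reflectively so that Spark itself does not need a compile-time HBase dependency. The sketch below is not the exact Spark source, only a minimal illustration of that reflective lookup (assuming HBase's standard static factory HBaseConfiguration.create(Configuration)); when the HBase client jars are absent from the client classpath, loadClass throws the ClassNotFoundException seen above and the provider falls back to the plain Hadoop configuration.

{code:scala}
import org.apache.hadoop.conf.Configuration
import scala.util.control.NonFatal

// Illustrative sketch (not Spark's exact code) of the reflective lookup that
// produces the ClassNotFoundException in the trace above.
def hbaseConf(hadoopConf: Configuration): Configuration = {
  try {
    val cls = Thread.currentThread().getContextClassLoader
      .loadClass("org.apache.hadoop.hbase.HBaseConfiguration")
    // HBaseConfiguration.create(Configuration) is a static factory, hence invoke(null, ...)
    cls.getMethod("create", classOf[Configuration])
      .invoke(null, hadoopConf)
      .asInstanceOf[Configuration]
  } catch {
    case NonFatal(e) =>
      // Without HBase jars on the classpath this path is taken and, as the log
      // shows, it is currently reported at WARN level even when HBase is unused.
      hadoopConf
  }
}
{code}

If HBase is actually needed, putting the HBase client jars on the classpath (for example via spark-submit --jars or spark.driver.extraClassPath) makes the lookup succeed; when HBase is not used, the WARN message is just noise.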