[ https://issues.apache.org/jira/browse/SPARK-16885?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15407073#comment-15407073 ]
Sean Owen commented on SPARK-16885:
-----------------------------------

I'm pretty certain it's because you specified a directory, not a file. Try a file to confirm.

> Spark shell failed to run in yarn-client mode
> ---------------------------------------------
>
>                 Key: SPARK-16885
>                 URL: https://issues.apache.org/jira/browse/SPARK-16885
>             Project: Spark
>          Issue Type: Bug
>          Components: Spark Shell
>    Affects Versions: 2.0.0
>         Environment: Ubuntu 12.04
>                      Hadoop 2.7.2 + Yarn
>            Reporter: Yury Zhyshko
>         Attachments: spark-env.sh
>
> I've installed Hadoop + Yarn in pseudo-distributed mode following these instructions:
> https://hadoop.apache.org/docs/r2.7.2/hadoop-project-dist/hadoop-common/SingleCluster.html#YARN_on_a_Single_Node
> After that I downloaded and installed a prebuilt Spark for Hadoop 2.7.
> The command that I used to run the shell:
> ./bin/spark-shell --master yarn --deploy-mode client --conf spark.yarn.archive=/home/yzhishko/work/spark/jars
> Here is the error:
> Setting default log level to "WARN".
> To adjust logging level use sc.setLogLevel(newLevel).
> 16/08/03 17:13:50 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
> 16/08/03 17:13:52 ERROR spark.SparkContext: Error initializing SparkContext.
> java.lang.IllegalArgumentException: Can not create a Path from an empty string
>         at org.apache.hadoop.fs.Path.checkPathArg(Path.java:126)
>         at org.apache.hadoop.fs.Path.<init>(Path.java:134)
>         at org.apache.hadoop.fs.Path.<init>(Path.java:93)
>         at org.apache.spark.deploy.yarn.Client.copyFileToRemote(Client.scala:338)
>         at org.apache.spark.deploy.yarn.Client.org$apache$spark$deploy$yarn$Client$$distribute$1(Client.scala:433)
>         at org.apache.spark.deploy.yarn.Client.prepareLocalResources(Client.scala:472)
>         at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:834)
>         at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:167)
>         at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:56)
>         at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:149)
>         at org.apache.spark.SparkContext.<init>(SparkContext.scala:500)
>         at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2256)
>         at org.apache.spark.sql.SparkSession$Builder$$anonfun$8.apply(SparkSession.scala:831)
>         at org.apache.spark.sql.SparkSession$Builder$$anonfun$8.apply(SparkSession.scala:823)
>         at scala.Option.getOrElse(Option.scala:121)
>         at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:823)
>         at org.apache.spark.repl.Main$.createSparkSession(Main.scala:101)
>         at $line3.$read$$iw$$iw.<init>(<console>:15)
>         at $line3.$read$$iw.<init>(<console>:31)
>         at $line3.$read.<init>(<console>:33)
>         at $line3.$read$.<init>(<console>:37)
>         at $line3.$read$.<clinit>(<console>)
>         at $line3.$eval$.$print$lzycompute(<console>:7)
>         at $line3.$eval$.$print(<console>:6)
>         at $line3.$eval.$print(<console>)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)
>         at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)
>         at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)
>         at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)
>         at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)
>         at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
>         at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
>         at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)
>         at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)
>         at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)
>         at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)
>         at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)
>         at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)
>         at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply$mcV$sp(SparkILoop.scala:38)
>         at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:37)
>         at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:37)
>         at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:214)
>         at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:37)
>         at org.apache.spark.repl.SparkILoop.loadFiles(SparkILoop.scala:94)
>         at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:920)
>         at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
>         at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
>         at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)
>         at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)
>         at org.apache.spark.repl.Main$.doMain(Main.scala:68)
>         at org.apache.spark.repl.Main$.main(Main.scala:51)
>         at org.apache.spark.repl.Main.main(Main.scala)
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>         at java.lang.reflect.Method.invoke(Method.java:498)
>         at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:729)
>         at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:185)
>         at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:210)
>         at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:124)
>         at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
> 16/08/03 17:13:52 WARN cluster.YarnSchedulerBackend$YarnSchedulerEndpoint: Attempted to request executors before the AM has registered!
> 16/08/03 17:13:52 WARN metrics.MetricsSystem: Stopping a MetricsSystem that is not running
> java.lang.IllegalArgumentException: Can not create a Path from an empty string
>         at org.apache.hadoop.fs.Path.checkPathArg(Path.java:126)
>         at org.apache.hadoop.fs.Path.<init>(Path.java:134)
>         at org.apache.hadoop.fs.Path.<init>(Path.java:93)
>         at org.apache.spark.deploy.yarn.Client.copyFileToRemote(Client.scala:338)
>         at org.apache.spark.deploy.yarn.Client.org$apache$spark$deploy$yarn$Client$$distribute$1(Client.scala:433)
>         at org.apache.spark.deploy.yarn.Client.prepareLocalResources(Client.scala:472)
>         at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:834)
>         at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:167)
>         at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:56)
>         at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:149)
>         at org.apache.spark.SparkContext.<init>(SparkContext.scala:500)
>         at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2256)
>         at org.apache.spark.sql.SparkSession$Builder$$anonfun$8.apply(SparkSession.scala:831)
>         at org.apache.spark.sql.SparkSession$Builder$$anonfun$8.apply(SparkSession.scala:823)
>         at scala.Option.getOrElse(Option.scala:121)
>         at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:823)
>         at org.apache.spark.repl.Main$.createSparkSession(Main.scala:101)
>         ... 47 elided
> <console>:14: error: not found: value spark
>        import spark.implicits._
>               ^
> <console>:14: error: not found: value spark
>        import spark.sql
>               ^
> Welcome to
>       ____              __
>      / __/__  ___ _____/ /__
>     _\ \/ _ \/ _ `/ __/ '_/
>    /___/ .__/\_,_/_/ /_/\_\   version 2.0.0
>       /_/
>
> Using Scala version 2.11.8 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_91)
> Type in expressions to have them evaluated.
> Type :help for more information.
>
> spark-env.sh is attached
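
For reference, a minimal sketch of the fix suggested above: spark.yarn.archive expects a single archive file containing the Spark jars, not the jars directory itself. The archive name and HDFS destination below are illustrative, not taken from the report.

    # Package the contents of the local Spark jars directory into one archive.
    cd /home/yzhishko/work/spark/jars
    zip -q -r /tmp/spark-libs.zip .

    # Stage the archive on HDFS so YARN containers can localize it from the cache.
    hdfs dfs -mkdir -p /user/yzhishko
    hdfs dfs -put /tmp/spark-libs.zip /user/yzhishko/spark-libs.zip

    # Point spark.yarn.archive at the archive file, not at the directory.
    ./bin/spark-shell --master yarn --deploy-mode client \
      --conf spark.yarn.archive=hdfs:///user/yzhishko/spark-libs.zip

If packaging an archive is inconvenient, spark.yarn.jars takes a comma-separated list of jar paths instead (globs such as hdfs:///some/path/*.jar are allowed).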