[ https://issues.apache.org/jira/browse/HBASE-15225?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15168809#comment-15168809 ]
Sanjay Kumar commented on HBASE-15225:
--------------------------------------

[~ted.m] - We recently upgraded to Hortonworks HDP 2.3.4 (HBase 1.1.2 / Spark 1.5.2). I built the code that you shared and tried the HBaseBulkGetExample, but I see the error given below. Is it because we have missed some configuration? Have you seen something like this?
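One thing we are not sure about is whether the driver needs an explicit Kerberos login before the SparkContext is created, since the NameNode refuses to issue an HDFS delegation token without one. A minimal sketch of that approach (the principal and keytab path are placeholders, not our actual values):

{code}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.security.UserGroupInformation

// Tell the Hadoop client libraries that the cluster is kerberized.
val hadoopConf = new Configuration()
hadoopConf.set("hadoop.security.authentication", "kerberos")
UserGroupInformation.setConfiguration(hadoopConf)

// Log in from a keytab before any SparkContext / FileSystem calls.
// Without a Kerberos login, FSNamesystem.getDelegationToken rejects the
// request with the "Delegation Token can be issued only with kerberos or
// web authentication" error shown below.
UserGroupInformation.loginUserFromKeytab("user@EXAMPLE.COM", "/etc/security/keytabs/user.keytab")
println("Logged in as: " + UserGroupInformation.getLoginUser)
{code}

Running kinit before launching spark-shell, or passing --principal and --keytab to spark-submit on YARN, should have the same effect. The full error: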
org.apache.hadoop.ipc.RemoteException(java.io.IOException): Delegation Token can be issued only with kerberos or web authentication
    at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getDelegationToken(FSNamesystem.java:6744)
    at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getDelegationToken(NameNodeRpcServer.java:628)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getDelegationToken(ClientNamenodeProtocolServerSideTranslatorPB.java:987)
    at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
    at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2151)
    at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2147)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:422)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
    at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2145)
    at org.apache.hadoop.ipc.Client.call(Client.java:1468)
    at org.apache.hadoop.ipc.Client.call(Client.java:1399)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
    at com.sun.proxy.$Proxy26.getDelegationToken(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getDelegationToken(ClientNamenodeProtocolTranslatorPB.java:909)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
    at com.sun.proxy.$Proxy27.getDelegationToken(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient.getDelegationToken(DFSClient.java:1029)
    at org.apache.hadoop.hdfs.DistributedFileSystem.getDelegationToken(DistributedFileSystem.java:1355)
    at org.apache.hadoop.fs.FileSystem.collectDelegationTokens(FileSystem.java:529)
    at org.apache.hadoop.fs.FileSystem.addDelegationTokens(FileSystem.java:507)
    at org.apache.hadoop.hdfs.DistributedFileSystem.addDelegationTokens(DistributedFileSystem.java:2041)
    at org.apache.spark.deploy.yarn.YarnSparkHadoopUtil$$anonfun$obtainTokensForNamenodes$1.apply(YarnSparkHadoopUtil.scala:126)
    at org.apache.spark.deploy.yarn.YarnSparkHadoopUtil$$anonfun$obtainTokensForNamenodes$1.apply(YarnSparkHadoopUtil.scala:123)
    at scala.collection.immutable.Set$Set1.foreach(Set.scala:74)
    at org.apache.spark.deploy.yarn.YarnSparkHadoopUtil.obtainTokensForNamenodes(YarnSparkHadoopUtil.scala:123)
    at org.apache.spark.deploy.yarn.Client.getTokenRenewalInterval(Client.scala:500)
    at org.apache.spark.deploy.yarn.Client.setupLaunchEnv(Client.scala:533)
    at org.apache.spark.deploy.yarn.Client.createContainerLaunchContext(Client.scala:633)
    at org.apache.spark.deploy.yarn.Client.submitApplication(Client.scala:123)
    at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:56)
    at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:144)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:523)
    at $line137.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$HBaseBulkGetExample$.main(<console>:94)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:83)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:88)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:90)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:92)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:94)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:96)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:98)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:100)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:102)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:104)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:106)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:108)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:110)
    at $line139.$read$$iwC$$iwC$$iwC.<init>(<console>:112)
    at $line139.$read$$iwC$$iwC.<init>(<console>:114)
    at $line139.$read$$iwC.<init>(<console>:116)
    at $line139.$read.<init>(<console>:118)
    at $line139.$read$.<init>(<console>:122)
    at $line139.$read$.<clinit>(<console>)
    at $line139.$eval$.<init>(<console>:7)
    at $line139.$eval$.<clinit>(<console>)
    at $line139.$eval.$print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
    at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
    at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
    at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
    at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
    at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
    at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:657)
    at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:665)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:670)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:997)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
    at org.apache.spark.repl.Main$.main(Main.scala:31)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:674)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

16/02/26 04:26:29 ERROR Utils: Uncaught exception in thread main
java.lang.NullPointerException
    at org.apache.spark.network.netty.NettyBlockTransferService.close(NettyBlockTransferService.scala:152)
    at org.apache.spark.storage.BlockManager.stop(BlockManager.scala:1228)
    at org.apache.spark.SparkEnv.stop(SparkEnv.scala:100)
    at org.apache.spark.SparkContext$$anonfun$stop$12.apply$mcV$sp(SparkContext.scala:1749)
    at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1185)
    at org.apache.spark.SparkContext.stop(SparkContext.scala:1748)
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:593)
    at $line137.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$HBaseBulkGetExample$.main(<console>:94)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:83)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:88)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:90)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:92)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:94)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:96)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:98)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:100)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:102)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:104)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:106)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:108)
    at $line139.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:110)
    at $line139.$read$$iwC$$iwC$$iwC.<init>(<console>:112)
    at $line139.$read$$iwC$$iwC.<init>(<console>:114)
    at $line139.$read$$iwC.<init>(<console>:116)
    at $line139.$read.<init>(<console>:118)
    at $line139.$read$.<init>(<console>:122)
    at $line139.$read$.<clinit>(<console>)
    at $line139.$eval$.<init>(<console>:7)
    at $line139.$eval$.<clinit>(<console>)
    at $line139.$eval.$print(<console>)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
    at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
    at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
    at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
    at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
    at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
    at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
    at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:657)
    at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:665)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:670)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:997)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
    at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
    at org.apache.spark.repl.Main$.main(Main.scala:31)
    at org.apache.spark.repl.Main.main(Main.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:674)
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)

> Connecting to HBase via newAPIHadoopRDD in PySpark gives
> org.apache.hadoop.hbase.client.RetriesExhaustedException
> ------------------------------------------------------------------------------------------------------------------
>
>                 Key: HBASE-15225
>                 URL: https://issues.apache.org/jira/browse/HBASE-15225
>             Project: HBase
>          Issue Type: Bug
>          Components: mapreduce, spark
>    Affects Versions: 0.98.4
>         Environment: Spark 1.6.0, HBase 0.98.4, Kerberos, hbase.rpc.protection set to authentication.
>            Reporter: Sanjay Kumar
>
> Unable to read an HBase table into Spark with hbase.security.authentication set to kerberos. Seeing the following error.
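The environment above pins hbase.rpc.protection to authentication. A mismatch between that client-side value and the server's hbase-site.xml, or an hbase-site.xml missing from the Spark driver's classpath, can surface as exactly the kind of EOFException quoted below. A minimal Scala spark-shell equivalent of the newAPIHadoopRDD call the report describes, with the relevant client settings made explicit (the table name is a placeholder; sc is the shell's SparkContext):

{code}
import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Result
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableInputFormat

// Picks up hbase-site.xml from the classpath; the explicit sets below only
// document the values that must agree with the server side.
val hbaseConf = HBaseConfiguration.create()
hbaseConf.set("hbase.security.authentication", "kerberos")
// Must match the server's hbase.rpc.protection; a mismatch makes the region
// server drop the connection, which the client reports as an EOFException.
hbaseConf.set("hbase.rpc.protection", "authentication")
hbaseConf.set(TableInputFormat.INPUT_TABLE, "test_table") // placeholder table name

// Same TableInputFormat code path that appears in the trace below
// (TableInputFormatBase.getSplits -> NewHadoopRDD.getPartitions).
val rdd = sc.newAPIHadoopRDD(
  hbaseConf,
  classOf[TableInputFormat],
  classOf[ImmutableBytesWritable],
  classOf[Result])
println(rdd.count())
{code}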
> : org.apache.hadoop.hbase.client.RetriesExhaustedException: Failed after attempts=31, exceptions:
> Thu Feb 04 22:01:55 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:01:56 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:01:56 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:01:57 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:01:59 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:02:03 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:02:13 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:02:23 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:02:34 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:02:44 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:03:04 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:03:24 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:03:44 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:04:04 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:04:24 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:04:44 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
> Thu Feb 04 22:05:04 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.IOException: Connection reset by peer
> .
> .
> .
> Thu Feb 04 22:09:46 CST 2016, org.apache.hadoop.hbase.client.RpcRetryingCaller@395327da, java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
>   at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:129)
>   at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:90)
>   at org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:282)
>   at org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:187)
>   at org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:182)
>   at org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:109)
>   at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:738)
>   at org.apache.hadoop.hbase.client.MetaScanner.metaScan(MetaScanner.java:178)
>   at org.apache.hadoop.hbase.client.MetaScanner.metaScan(MetaScanner.java:82)
>   at org.apache.hadoop.hbase.client.MetaScanner.allTableRegions(MetaScanner.java:282)
>   at org.apache.hadoop.hbase.client.HTable.getRegionLocations(HTable.java:616)
>   at org.apache.hadoop.hbase.util.RegionSizeCalculator.<init>(RegionSizeCalculator.java:79)
>   at org.apache.hadoop.hbase.util.RegionSizeCalculator.<init>(RegionSizeCalculator.java:64)
>   at org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.getSplits(TableInputFormatBase.java:160)
>   at org.apache.spark.rdd.NewHadoopRDD.getPartitions(NewHadoopRDD.scala:115)
>   at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
>   at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
>   at scala.Option.getOrElse(Option.scala:120)
>   at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
>   at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
>   at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
>   at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
>   at scala.Option.getOrElse(Option.scala:120)
>   at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
>   at org.apache.spark.rdd.RDD$$anonfun$take$1.apply(RDD.scala:1277)
>   at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
>   at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
>   at org.apache.spark.rdd.RDD.withScope(RDD.scala:306)
>   at org.apache.spark.rdd.RDD.take(RDD.scala:1272)
>   at org.apache.spark.api.python.SerDeUtil$.pairRDDToPython(SerDeUtil.scala:202)
>   at org.apache.spark.api.python.PythonRDD$.newAPIHadoopRDD(PythonRDD.scala:530)
>   at org.apache.spark.api.python.PythonRDD.newAPIHadoopRDD(PythonRDD.scala)
>   at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>   at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>   at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>   at java.lang.reflect.Method.invoke(Method.java:606)
>   at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:231)
>   at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:379)
>   at py4j.Gateway.invoke(Gateway.java:259)
>   at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:133)
>   at py4j.commands.CallCommand.execute(CallCommand.java:79)
>   at py4j.GatewayConnection.run(GatewayConnection.java:207)
>   at java.lang.Thread.run(Thread.java:744)
> Caused by: java.io.IOException: Call to d-767tfz1.target.com/10.66.241.13:60020 failed on local exception: java.io.EOFException
>   at org.apache.hadoop.hbase.ipc.RpcClient.wrapException(RpcClient.java:1484)
>   at org.apache.hadoop.hbase.ipc.RpcClient.call(RpcClient.java:1456)
>   at org.apache.hadoop.hbase.ipc.RpcClient.callBlockingMethod(RpcClient.java:1656)
>   at org.apache.hadoop.hbase.ipc.RpcClient$BlockingRpcChannelImplementation.callBlockingMethod(RpcClient.java:1714)
>   at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.scan(ClientProtos.java:29900)
>   at org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:308)
>   at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:164)
>   at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:59)
>   at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:114)
>   ... 42 more
> Caused by: java.io.EOFException
>   at java.io.DataInputStream.readInt(DataInputStream.java:392)
>   at org.apache.hadoop.hbase.ipc.RpcClient$Connection.readResponse(RpcClient.java:1071)
>   at org.apache.hadoop.hbase.ipc.RpcClient$Connection.run(RpcClient.java:727)

--
This message was sent by Atlassian JIRA
(v6.3.4#6332)