Hi All,

I receive the following error when using HBaseTestingUtility and start up
the startMiniHBaseCluster. As you can see I am attempting to localise my
test HBase test framework, so I cannot understand what could be going wrong
in regards to networking.

An exception or error caused a run to abort: Call From
krakendev/127.0.0.1 to localhost.localdomain:8020 failed on connection
exception: java.net.ConnectException: Connection refused; For more
details see:  http://wiki.apache.org/hadoop/ConnectionRefused
  java.net.ConnectException: Call From krakendev/127.0.0.1 to
localhost.localdomain:8020 failed on connection exception:
java.net.ConnectException: Connection refused; For more details see:
http://wiki.apache.org/hadoop/ConnectionRefused
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at 
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
    at 
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
    at org.apache.hadoop.net.NetUtils.wrapWithMessage(NetUtils.java:791)
    at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:731)
    at org.apache.hadoop.ipc.Client.call(Client.java:1470)
    at org.apache.hadoop.ipc.Client.call(Client.java:1403)
    at 
org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:230)
    at com.sun.proxy.$Proxy14.getFileInfo(Unknown Source)
    at 
org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:752)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:256)
    at 
org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:104)
    at com.sun.proxy.$Proxy15.getFileInfo(Unknown Source)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at org.apache.hadoop.hbase.fs.HFileSystem$1.invoke(HFileSystem.java:279)
    at com.sun.proxy.$Proxy16.getFileInfo(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:2095)
    at 
org.apache.hadoop.hdfs.DistributedFileSystem$19.doCall(DistributedFileSystem.java:1214)
    at 
org.apache.hadoop.hdfs.DistributedFileSystem$19.doCall(DistributedFileSystem.java:1210)
    at 
org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at 
org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1210)
    at 
org.apache.hadoop.fs.FilterFileSystem.getFileStatus(FilterFileSystem.java:424)
    at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1409)
    at org.apache.hadoop.fs.FileSystem.deleteOnExit(FileSystem.java:1364)
    at 
org.apache.hadoop.hbase.HBaseTestingUtility.getNewDataTestDirOnTestFS(HBaseTestingUtility.java:479)
    at 
org.apache.hadoop.hbase.HBaseTestingUtility.setupDataTestDirOnTestFS(HBaseTestingUtility.java:458)
    at 
org.apache.hadoop.hbase.HBaseTestingUtility.getDataTestDirOnTestFS(HBaseTestingUtility.java:431)
    at 
org.apache.hadoop.hbase.HBaseTestingUtility.getDefaultRootDirPath(HBaseTestingUtility.java:1105)
    at 
org.apache.hadoop.hbase.HBaseTestingUtility.createRootDir(HBaseTestingUtility.java:1136)
    at 
org.apache.hadoop.hbase.HBaseTestingUtility.startMiniHBaseCluster(HBaseTestingUtility.java:973)
    at 
org.apache.hadoop.hbase.HBaseTestingUtility.startMiniHBaseCluster(HBaseTestingUtility.java:951)
    at 
com.thomsonreuters.kraken.medusa.service.MySharedHBaseCluster$class.beforeAll(MySharedHBaseCluster.scala:28)
    at 
com.thomsonreuters.kraken.medusa.service.AlertStatusServiceSpec.beforeAll(AlertStatusServiceSpec.scala:12)
    at 
org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:212)
    at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:210)
    at 
com.thomsonreuters.kraken.medusa.service.AlertStatusServiceSpec.org$scalatest$BeforeAndAfter$$super$run(AlertStatusServiceSpec.scala:7)
    at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:258)
    at 
com.thomsonreuters.kraken.medusa.service.AlertStatusServiceSpec.run(AlertStatusServiceSpec.scala:7)
    at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:45)
    at 
org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1340)
    at 
org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$1.apply(Runner.scala:1334)
    at scala.collection.immutable.List.foreach(List.scala:318)
    at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:1334)
    at 
org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1011)
    at 
org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1010)
    at 
org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:1500)
    at 
org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1010)
    at org.scalatest.tools.Runner$.run(Runner.scala:850)
    at org.scalatest.tools.Runner.run(Runner.scala)
    at 
org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:138)
    at 
org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:28)
  Caused by: java.net.ConnectException: Connection refused
    at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method)
    at sun.nio.ch.SocketChannelImpl.finishConnect(SocketChannelImpl.java:717)
    at 
org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206)
    at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530)
    at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:494)
    at org.apache.hadoop.ipc.Client$Connection.setupConnection(Client.java:609)
    at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:708)
    at org.apache.hadoop.ipc.Client$Connection.access$2800(Client.java:370)
    at org.apache.hadoop.ipc.Client.getConnection(Client.java:1519)
    at org.apache.hadoop.ipc.Client.call(Client.java:1442)
    ... 52 more

There is nothing else running on the port 8020, and I have another project
which uses the same HBaseTestingUtility and it is able to run successfully.
The other project is a pure Scala project, while the project I am having
issues with is Scala/java project (although I do not think this is the
problem).

object MySharedHBaseCluster {
 var _hbase: HBaseTestingUtility = _}
trait MySharedHBaseCluster extends BeforeAndAfter with BeforeAndAfterAll {
 self: Suite =>

 import MySharedHBaseCluster._

 def hbase: HBaseTestingUtility = _hbase

 override def beforeAll() {

   _hbase = HBaseTestingUtility.createLocalHTU()

   _hbase.getConfiguration.set("hbase.master.info.port", "60050")
   _hbase.getConfiguration.set("hadoop.security.authentication", "simple")

   _hbase.cleanupTestDir()
   println("Starting Mini-Cluster...")
   _hbase.startMiniZKCluster()
   _hbase.startMiniHBaseCluster(1, 1)
   _hbase.startMiniDFSCluster(1)

   println("Mini-Cluster started")
 }

 override def afterAll() {
   println("Shutting down Mini-Cluster...")
   _hbase.shutdownMiniDFSCluster()
   _hbase.shutdownMiniHBaseCluster()
   _hbase.shutdownMiniZKCluster()
   println("Mini-Cluster shutdown")
   _hbase.cleanupTestDir()
 }

}

Any ideas what could be causing this, and maybe what I can do to resolve
this issue?
Thanks,

Keech Achara

Reply via email to