Hi Yves, Looks like this issue has been resolved. Please find the JIRA link below.
https://issues.apache.org/jira/browse/SQOOP-1527, https://cwiki.apache.org/confluence/display/SQOOP/Security+Guide+On+Sqoop+2#SecurityGuideOnSqoop2-KerberosAuthentication Thanks K On Wed, Apr 22, 2015 at 11:30 PM, yves callaert <yves_calla...@hotmail.com> wrote: > Hi Kumar, > Apparently there is a JIRA for this issue: > https://issues.apache.org/jira/browse/SQOOP-1527 > I was reading up on the problem on this link > https://community.cloudera.com/t5/Data-Ingestion-Integration/Does-kerberos-allows-sqoop-to-migrate-the-remote-mysql-sql/td-p/21326 > and currently there is no support yet for sqoop2 and kerberos. > > Haven't had the problem myself, so hope the link helps. > > With Regards > Yves C. > > ------------------------------ > Date: Wed, 22 Apr 2015 14:58:35 -0700 > Subject: Sqoop2 error when I run the jobs through hue. > From: kjayapa...@gmail.com > To: cdh-u...@cloudera.org; user@hadoop.apache.org > > > Hi, > > > I am getting this error when I execute run the job in sqoop2 from hue. I > see lots of people talking about this error but no proper resolution. > > Did any one able to resolve this issue. Any help is appreciated. > > > > 2015-04-22 21:36:07,281 ERROR > org.apache.sqoop.submission.mapreduce.MapreduceSubmissionEngine: Error in > submitting job > org.apache.hadoop.security.AccessControlException: SIMPLE authentication > is not enabled. 
Available:[TOKEN, KERBEROS] > at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native > Method) > at > sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) > at > sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) > at java.lang.reflect.Constructor.newInstance(Constructor.java:526) > at > org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106) > at > org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73) > at > org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1914) > at > org.apache.hadoop.hdfs.DistributedFileSystem$17.doCall(DistributedFileSystem.java:1089) > at > org.apache.hadoop.hdfs.DistributedFileSystem$17.doCall(DistributedFileSystem.java:1085) > at > org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) > at > org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1085) > at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1400) > at > org.apache.hadoop.mapreduce.JobSubmissionFiles.getStagingDir(JobSubmissionFiles.java:116) > at > org.apache.hadoop.mapreduce.JobSubmitter.submitJobInternal(JobSubmitter.java:435) > at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1295) > at org.apache.hadoop.mapreduce.Job$10.run(Job.java:1292) > at java.security.AccessController.doPrivileged(Native Method) > at javax.security.auth.Subject.doAs(Subject.java:415) > at > org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1642) > at org.apache.hadoop.mapreduce.Job.submit(Job.java:1292) > at > org.apache.sqoop.submission.mapreduce.MapreduceSubmissionEngine.submit(MapreduceSubmissionEngine.java:246) > at org.apache.sqoop.driver.JobManager.start(JobManager.java:289) > at > org.apache.sqoop.handler.JobRequestHandler.startJob(JobRequestHandler.java:367) > at > 
org.apache.sqoop.handler.JobRequestHandler.handleEvent(JobRequestHandler.java:112) > at > org.apache.sqoop.server.v1.JobServlet.handlePutRequest(JobServlet.java:96) > at > org.apache.sqoop.server.SqoopProtocolServlet.doPut(SqoopProtocolServlet.java:79) > at javax.servlet.http.HttpServlet.service(HttpServlet.java:646) > at javax.servlet.http.HttpServlet.service(HttpServlet.java:723) > at > org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:290) > at > org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:206) > at > org.apache.hadoop.security.authentication.server.AuthenticationFilter.doFilter(AuthenticationFilter.java:592) > at > org.apache.hadoop.security.authentication.server.AuthenticationFilter.doFilter(AuthenticationFilter.java:555) > at > org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:235) > at > org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:206) > at > org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:233) > at > org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:191) > at > org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:127) > at > org.apache.catalina.valves.ErrorReportValve.invoke(ErrorReportValve.java:103) > at > org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:109) > at > org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:293) > at > org.apache.coyote.http11.Http11Processor.process(Http11Processor.java:861) > at > org.apache.coyote.http11.Http11Protocol$Http11ConnectionHandler.process(Http11Protocol.java:606) > at > org.apache.tomcat.util.net.JIoEndpoint$Worker.run(JIoEndpoint.java:489) > at java.lang.Thread.run(Thread.java:745) > Caused by: > org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): > SIMPLE authentication is not enabled. 
Available:[TOKEN, KERBEROS] > at org.apache.hadoop.ipc.Client.call(Client.java:1411) > at org.apache.hadoop.ipc.Client.call(Client.java:1364) > at > org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:206) > at com.sun.proxy.$Proxy23.getFileInfo(Unknown Source) > at > org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:744) > at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) > at > sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) > at > sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) > at java.lang.reflect.Method.invoke(Method.java:606) > at > org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187) > at > org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102) > at com.sun.proxy.$Proxy24.getFileInfo(Unknown Source) > at > org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1912) > ... 37 more > > > > Thanks > Kumar >