[ https://issues.apache.org/jira/browse/FALCON-799?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Balu Vellanki updated FALCON-799:
---------------------------------
Issue Type: Sub-task (was: Bug)
Parent: FALCON-708
> Users cannot make web REST api calls on server with umask 077
> -------------------------------------------------------------
>
> Key: FALCON-799
> URL: https://issues.apache.org/jira/browse/FALCON-799
> Project: Falcon
> Issue Type: Sub-task
> Components: webapp
> Affects Versions: 0.6
> Reporter: Balu Vellanki
> Fix For: 0.6
>
>
> After applying the patch for FALCON-753, I attempted to fetch the list of instances
> for a process owned by "hrt_qa" using the Falcon UI:
> http://172.18.145.72:15443/api/instance/status/process/rawEmailIngestProcess?start=2014-10-06T00:00Z&end=2014-10-11T20:35Z
> This fails with the following error:
> {code}
> <instancesResult>
> <status>FAILED</status>
> <message>
> org.apache.falcon.FalconException: org.apache.hadoop.security.AccessControlException: Permission denied: user=falcon-dashboard, access=EXECUTE, inode="/apps/falcon/primaryCluster/staging/falcon":hrt_qa:users:drwx------
>     at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkFsPermission(FSPermissionChecker.java:271)
>     at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:257)
>     at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:208)
>     at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:171)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6423)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6405)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPathAccess(FSNamesystem.java:6330)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getListingInt(FSNamesystem.java:4867)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getListing(FSNamesystem.java:4828)
>     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getListing(NameNodeRpcServer.java:811)
>     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getListing(ClientNamenodeProtocolServerSideTranslatorPB.java:611)
>     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)
>     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:962)
>     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2039)
>     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2035)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:396)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1614)
>     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2033)
>     at org.apache.falcon.entity.EntityUtil.getAllStagingPaths(EntityUtil.java:555)
>     at org.apache.falcon.workflow.engine.OozieWorkflowEngine.findBundles(OozieWorkflowEngine.java:269)
>     at org.apache.falcon.workflow.engine.OozieWorkflowEngine.findBundles(OozieWorkflowEngine.java:304)
>     at org.apache.falcon.workflow.engine.OozieWorkflowEngine.getCoordActions(OozieWorkflowEngine.java:843)
>     at org.apache.falcon.workflow.engine.OozieWorkflowEngine.doJobAction(OozieWorkflowEngine.java:549)
>     at org.apache.falcon.workflow.engine.OozieWorkflowEngine.getStatus(OozieWorkflowEngine.java:519)
>     at org.apache.falcon.resource.AbstractInstanceManager.getStatus(AbstractInstanceManager.java:129)
>     at org.apache.falcon.resource.InstanceManager.getStatus(InstanceManager.java:99)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
>     at java.lang.reflect.Method.invoke(Method.java:597)
>     at org.apache.falcon.resource.channel.IPCChannel.invoke(IPCChannel.java:49)
>     at org.apache.falcon.resource.proxy.InstanceManagerProxy$3.doExecute(InstanceManagerProxy.java:151)
>     at org.apache.falcon.resource.proxy.InstanceManagerProxy$InstanceProxy.execute(InstanceManagerProxy.java:332)
>     at org.apache.falcon.resource.proxy.InstanceManagerProxy.getStatus_aroundBody4(InstanceManagerProxy.java:155)
>     at org.apache.falcon.resource.proxy.InstanceManagerProxy$AjcClosure5.run(InstanceManagerProxy.java:1)
>     at org.aspectj.runtime.reflect.JoinPointImpl.proceed(JoinPointImpl.java:149)
>     at org.apache.falcon.aspect.AbstractFalconAspect.logAroundMonitored(AbstractFalconAspect.java:51)
>     at org.apache.falcon.resource.proxy.InstanceManagerProxy.getStatus(InstanceManagerProxy.java:136)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
>     at java.lang.reflect.Method.invoke(Method.java:597)
>     at com.sun.jersey.spi.container.JavaMethodInvokerFactory$1.invoke(JavaMethodInvokerFactory.java:60)
>     at com.sun.jersey.server.impl.model.method.dispatch.AbstractResourceMethodDispatchProvider$TypeOutInvoker._dispatch(AbstractResourceMethodDispatchProvider.java:185)
>     at com.sun.jersey.server.impl.model.method.dispatch.ResourceJavaMethodDispatcher.dispatch(ResourceJavaMethodDispatcher.java:75)
>     at com.sun.jersey.server.impl.uri.rules.HttpMethodRule.accept(HttpMethodRule.java:288)
>     at com.sun.jersey.server.impl.uri.rules.RightHandPathRule.accept(RightHandPathRule.java:147)
>     at com.sun.jersey.server.impl.uri.rules.ResourceClassRule.accept(ResourceClassRule.java:108)
>     at com.sun.jersey.server.impl.uri.rules.RightHandPathRule.accept(RightHandPathRule.java:147)
>     at com.sun.jersey.server.impl.uri.rules.RootResourceClassesRule.accept(RootResourceClassesRule.java:84)
>     at com.sun.jersey.server.impl.application.WebApplicationImpl._handleRequest(WebApplicationImpl.java:1469)
>     at com.sun.jersey.server.impl.application.WebApplicationImpl._handleRequest(WebApplicationImpl.java:1400)
>     at com.sun.jersey.server.impl.application.WebApplicationImpl.handleRequest(WebApplicationImpl.java:1349)
>     at com.sun.jersey.server.impl.application.WebApplicationImpl.handleRequest(WebApplicationImpl.java:1339)
>     at com.sun.jersey.spi.container.servlet.WebComponent.service(WebComponent.java:416)
>     at com.sun.jersey.spi.container.servlet.ServletContainer.service(ServletContainer.java:537)
>     at com.sun.jersey.spi.container.servlet.ServletContainer.service(ServletContainer.java:699)
>     at javax.servlet.http.HttpServlet.service(HttpServlet.java:820)
>     at org.mortbay.jetty.servlet.ServletHolder.handle(ServletHolder.java:511)
>     at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1221)
>     at org.apache.falcon.security.FalconAuthorizationFilter.doFilter(FalconAuthorizationFilter.java:73)
>     at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
>     at org.apache.falcon.security.FalconAuthenticationFilter$2.doFilter(FalconAuthenticationFilter.java:187)
>     at org.apache.hadoop.security.authentication.server.AuthenticationFilter.doFilter(AuthenticationFilter.java:572)
>     at org.apache.hadoop.security.authentication.server.AuthenticationFilter.doFilter(AuthenticationFilter.java:542)
>     at org.apache.falcon.security.FalconAuthenticationFilter.doFilter(FalconAuthenticationFilter.java:197)
>     at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
>     at org.apache.falcon.security.FalconAuditFilter.doFilter(FalconAuditFilter.java:56)
>     at org.mortbay.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1212)
>     at org.mortbay.jetty.servlet.ServletHandler.handle(ServletHandler.java:399)
>     at org.mortbay.jetty.security.SecurityHandler.handle(SecurityHandler.java:216)
>     at org.mortbay.jetty.servlet.SessionHandler.handle(SessionHandler.java:182)
>     at org.mortbay.jetty.handler.ContextHandler.handle(ContextHandler.java:766)
>     at org.mortbay.jetty.webapp.WebAppContext.handle(WebAppContext.java:450)
>     at org.mortbay.jetty.handler.HandlerWrapper.handle(HandlerWrapper.java:152)
>     at org.mortbay.jetty.Server.handle(Server.java:326)
>     at org.mortbay.jetty.HttpConnection.handleRequest(HttpConnection.java:542)
>     at org.mortbay.jetty.HttpConnection$RequestHandler.headerComplete(HttpConnection.java:928)
>     at org.mortbay.jetty.HttpParser.parseNext(HttpParser.java:549)
>     at org.mortbay.jetty.HttpParser.parseAvailable(HttpParser.java:212)
>     at org.mortbay.jetty.HttpConnection.handle(HttpConnection.java:404)
>     at org.mortbay.jetty.bio.SocketConnector$Connection.run(SocketConnector.java:228)
>     at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java:582)
> Caused by: org.apache.hadoop.security.AccessControlException: Permission denied: user=falcon-dashboard, access=EXECUTE, inode="/apps/falcon/primaryCluster/staging/falcon":hrt_qa:users:drwx------
>     at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkFsPermission(FSPermissionChecker.java:271)
>     at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:257)
>     at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:208)
>     at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:171)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6423)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6405)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPathAccess(FSNamesystem.java:6330)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getListingInt(FSNamesystem.java:4867)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getListing(FSNamesystem.java:4828)
>     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getListing(NameNodeRpcServer.java:811)
>     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getListing(ClientNamenodeProtocolServerSideTranslatorPB.java:611)
>     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)
>     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:962)
>     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2039)
>     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2035)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:396)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1614)
>     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2033)
>     at sun.reflect.GeneratedConstructorAccessor68.newInstance(Unknown Source)
>     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:27)
>     at java.lang.reflect.Constructor.newInstance(Constructor.java:513)
>     at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
>     at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
>     at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:1907)
>     at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:1888)
>     at org.apache.hadoop.hdfs.DistributedFileSystem.listStatusInternal(DistributedFileSystem.java:693)
>     at org.apache.hadoop.hdfs.DistributedFileSystem.access$600(DistributedFileSystem.java:105)
>     at org.apache.hadoop.hdfs.DistributedFileSystem$15.doCall(DistributedFileSystem.java:755)
>     at org.apache.hadoop.hdfs.DistributedFileSystem$15.doCall(DistributedFileSystem.java:751)
>     at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
>     at org.apache.hadoop.hdfs.DistributedFileSystem.listStatus(DistributedFileSystem.java:751)
>     at org.apache.hadoop.fs.FileSystem.listStatus(FileSystem.java:1485)
>     at org.apache.hadoop.fs.FileSystem.listStatus(FileSystem.java:1525)
>     at org.apache.falcon.entity.EntityUtil.getAllStagingPaths(EntityUtil.java:544)
>     ... 64 more
> Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException): Permission denied: user=falcon-dashboard, access=EXECUTE, inode="/apps/falcon/primaryCluster/staging/falcon":hrt_qa:users:drwx------
>     at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkFsPermission(FSPermissionChecker.java:271)
>     at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:257)
>     at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:208)
>     at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:171)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6423)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPermission(FSNamesystem.java:6405)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkPathAccess(FSNamesystem.java:6330)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getListingInt(FSNamesystem.java:4867)
>     at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getListing(FSNamesystem.java:4828)
>     at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getListing(NameNodeRpcServer.java:811)
>     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getListing(ClientNamenodeProtocolServerSideTranslatorPB.java:611)
>     at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>     at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:619)
>     at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:962)
>     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2039)
>     at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2035)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:396)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1614)
>     at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2033)
>     at org.apache.hadoop.ipc.Client.call(Client.java:1468)
>     at org.apache.hadoop.ipc.Client.call(Client.java:1399)
>     at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
>     at $Proxy27.getListing(Unknown Source)
>     at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getListing(ClientNamenodeProtocolTranslatorPB.java:555)
>     at sun.reflect.GeneratedMethodAccessor33.invoke(Unknown Source)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
>     at java.lang.reflect.Method.invoke(Method.java:597)
>     at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
>     at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
>     at $Proxy28.getListing(Unknown Source)
>     at org.apache.hadoop.hdfs.DFSClient.listPaths(DFSClient.java:1905)
>     ... 74 more
> {code}
> The staging path is owned by "hrt_qa" with mode drwx------ (the result of umask 077), so the hard-coded
> "falcon-dashboard" user cannot traverse it. This needs to be fixed by making html5-ui/js/falcon.js set
> USER_ID to "hrt_qa", or whoever the actual user is, instead of defaulting to "falcon-dashboard". In other
> words, the user should specify user.name=<val> in the query once, and Falcon should remember it for
> subsequent REST API calls.
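> A minimal sketch of one way the dashboard could do this is below. It is illustrative only, not the actual
> falcon.js code: the falconApi wrapper, its function names, and the endpoint handling are assumptions, and
> jQuery is assumed to be available in the UI.
> {code}
> // Hypothetical sketch: remember the user once and append user.name=<val> to every REST call.
> var falconApi = (function () {
>     var userId = null;                        // remembered after the first call
>
>     function setUser(name) {                  // e.g. "hrt_qa"; replaces the old
>         userId = name;                        // hard-coded "falcon-dashboard" default
>     }
>
>     function withUser(url) {                  // add user.name=<val> to the query string
>         if (!userId) {
>             return url;
>         }
>         var sep = url.indexOf("?") === -1 ? "?" : "&";
>         return url + sep + "user.name=" + encodeURIComponent(userId);
>     }
>
>     function instanceStatus(entityType, entityName, start, end) {
>         var url = "/api/instance/status/" + entityType + "/" + entityName +
>                   "?start=" + start + "&end=" + end;
>         return $.getJSON(withUser(url));      // jQuery assumed to be available in the UI
>     }
>
>     return { setUser: setUser, instanceStatus: instanceStatus };
> }());
>
> // Usage: specify the user once, then reuse it for subsequent REST API calls:
> // falconApi.setUser("hrt_qa");
> // falconApi.instanceStatus("process", "rawEmailIngestProcess",
> //                          "2014-10-06T00:00Z", "2014-10-11T20:35Z");
> {code}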
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)