[ 
https://issues.apache.org/jira/browse/HIVE-18090?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

Eugene Koifman updated HIVE-18090:
----------------------------------
    Affects Version/s:     (was: 3.0.0)
                       1.3.0
                       2.0.0

> acid heartbeat fails when metastore is connected via hadoop credential
> ----------------------------------------------------------------------
>
>                 Key: HIVE-18090
>                 URL: https://issues.apache.org/jira/browse/HIVE-18090
>             Project: Hive
>          Issue Type: Bug
>          Components: HiveServer2, Transactions
>    Affects Versions: 1.3.0, 2.0.0
>            Reporter: anishek
>            Assignee: anishek
>             Fix For: 3.0.0
>
>         Attachments: HIVE-18090.0.patch
>
>
> Steps to recreate the issue, assuming two users:
> * test
> * another
> Create one jceks file per user and place it on HDFS so that only the owning user 
> can access it. HDFS locations and permissions:
> {code}
> -rwx------   1 another another        492 2017-11-16 13:06 
> /user/another/another.jceks
> -rwx------   1 test test        489 2017-11-16 13:05 /user/test/test.jceks
> {code}
> Keystore passwords used when creating each file (a sketch of the creation 
> commands follows this list):
> * /user/another/another.jceks -- another
> * /user/test/test.jceks -- test
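> A minimal sketch of how each keystore could have been created (hypothetical 
> commands; the alias is assumed to be javax.jdo.option.ConnectionPassword, the key 
> the metastore later resolves through Configuration.getPassword):
> {code}
> # run as user 'test'; HADOOP_CREDSTORE_PASSWORD sets the keystore password ('test' here)
> export HADOOP_CREDSTORE_PASSWORD=test
> # prompts for the metastore DB password value and stores it under the alias
> hadoop credential create javax.jdo.option.ConnectionPassword \
>     -provider jceks://hdfs/user/test/test.jceks
> # restrict access to the owning user only
> hdfs dfs -chmod 700 /user/test/test.jceks
> # repeat as user 'another' with HADOOP_CREDSTORE_PASSWORD=another and /user/another/another.jceks
> {code}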
> In core-site.xml (where [superuser] is the user HiveServer2 runs as):
> {code}
>     <property>
>         <name>hadoop.proxyuser.[superuser].hosts</name>
>         <value>*</value>
>     </property>
>     <property>
>         <name>hadoop.proxyuser.[superuser].groups</name>
>         <value>*</value>
>     </property>
> {code}
> Then restart HDFS so the proxyuser settings take effect.
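> (As an aside, a full HDFS restart may not be strictly required; refreshing the 
> proxyuser mappings on the NameNode is usually enough:)
> {code}
> hdfs dfsadmin -refreshSuperUserGroupsConfiguration
> {code}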
> Enable ACID on HiveServer2 by changing the required transaction properties (a 
> sketch follows the block below). Additional HiveServer2 configuration changes:
> {code}
> * hive.metastore.warehouse.dir=file:///tmp/hive/test-warehouse
> * hive.server2.enable.doAs=true
> * remove javax.jdo.option.ConnectionPassword property from hive-site.xml
> {code}
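> A sketch of the "required properties" for ACID mentioned above, assuming the usual 
> transactional setup (the exact set used here is not spelled out in the description):
> {code}
> hive.support.concurrency=true
> hive.txn.manager=org.apache.hadoop.hive.ql.lockmgr.DbTxnManager
> hive.compactor.initiator.on=true
> hive.compactor.worker.threads=1
> {code}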
> Start HiveServer2.
> Connect to the server with beeline as any user and populate a source table:
> {code}
> create table a (i int, b string);
> insert into a values (0 , '0'), (1 , '1'), (2 , '2'), (3 , '3'), (4 , '4'), 
> (5 , '5'), (6 , '6'), (7 , '7'), (8 , '8'), (9 , '9'), (10 , '10'), (11 , 
> '11'), (12 , '12'), (13 , '13'), (14 , '14'), (15 , '15'), (16 , '16'), (17 , 
> '17'), (18 , '18'), (19 , '19'), (20 , '20'), (21 , '21'), (22 , '22'), (23 , 
> '23'), (24 , '24'), (25 , '25'), (26 , '26'), (27 , '27'), (28 , '28'), (29 , 
> '29'), (30 , '30'), (31 , '31'), (32 , '32'), (33 , '33'), (34 , '34'), (35 , 
> '35'), (36 , '36'), (37 , '37'), (38 , '38'), (39 , '39'), (40 , '40'), (41 , 
> '41'), (42 , '42'), (43 , '43'), (44 , '44'), (45 , '45'), (46 , '46'), (47 , 
> '47'), (48 , '48'), (49 , '49'), (50 , '50'), (51 , '51'), (52 , '52'), (53 , 
> '53'), (54 , '54'), (55 , '55'), (56 , '56'), (57 , '57'), (58 , '58'), (59 , 
> '59'), (60 , '60'), (61 , '61'), (62 , '62'), (63 , '63'), (64 , '64'), (65 , 
> '65'), (66 , '66'), (67 , '67'), (68 , '68'), (69 , '69'), (70 , '70'), (71 , 
> '71'), (72 , '72'), (73 , '73'), (74 , '74'), (75 , '75'), (76 , '76'), (77 , 
> '77'), (78 , '78'), (79 , '79'), (80 , '80'), (81 , '81'), (82 , '82'), (83 , 
> '83'), (84 , '84'), (85 , '85'), (86 , '86'), (87 , '87'), (88 , '88'), (89 , 
> '89'), (90 , '90'), (91 , '91'), (92 , '92'), (93 , '93'), (94 , '94'), (95 , 
> '95'), (96 , '96'), (97 , '97'), (98 , '98'), (99 , '99');
> {code}
> Exit beeline and reconnect as user another:
> {code}
> ./beeline -u 
> "jdbc:hive2://localhost:10000/default?hive.strict.checks.cartesian.product=false;hive.txn.timeout=4s;hive.txn.heartbeat.threadpool.size=1;hadoop.security.credential.provider.path=jceks://hdfs/user/another/another.jceks;ssl.server.keystore.keypassword=another"
>  -n another
> create table another_a_acid (i int, b string) clustered by (i) into 8 buckets 
> stored as orc tblproperties('transactional'='true');
> insert overwrite table another_a_acid select a2.i, a3.b from a a1 join a a2 
> join a a3 on 1=1;
> {code}
> Open another beeline session as user test:
> {code}
> ./beeline -u 
> "jdbc:hive2://localhost:10000/default?hive.strict.checks.cartesian.product=false;hive.txn.timeout=4s;hive.txn.heartbeat.threadpool.size=1;hadoop.security.credential.provider.path=jceks://hdfs/user/test/test.jceks;ssl.server.keystore.keypassword=test"
>  -n test
> create table a_acid (i int, b string) clustered by (i) into 8 buckets stored 
> as orc tblproperties('transactional'='true');
> insert overwrite table a_acid select a2.i, a3.b from a a1 join a a2 join a a3 
> on 1=1;
> {code}
> The insert fails with the following exception:
> {code}
> 2017-11-17T12:15:52,664 DEBUG [Heartbeater-1] retry.RetryInvocationHandler: 
> Exception while invoking ClientNamenodeProtocolTranslatorPB.getFileInfo over 
> null. Not retrying because try once and fail.
> org.apache.hadoop.ipc.RemoteException: Permission denied: user=test, 
> access=EXECUTE, inode="/user/another/another.jceks":another:another:drwx------
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:319)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:259)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:205)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:190)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1955)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getFileInfo(FSDirStatAndListingOp.java:109)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getFileInfo(FSNamesystem.java:4111)
>       at 
> org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:1137)
>       at 
> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:866)
>       at 
> org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>       at 
> org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:640)
>       at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
>       at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2351)
>       at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2347)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at javax.security.auth.Subject.doAs(Subject.java:422)
>       at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
>       at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2345)
>       at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1554) 
> ~[hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at org.apache.hadoop.ipc.Client.call(Client.java:1498) 
> ~[hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at org.apache.hadoop.ipc.Client.call(Client.java:1398) 
> ~[hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233)
>  ~[hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at com.sun.proxy.$Proxy30.getFileInfo(Unknown Source) ~[?:?]
>       at 
> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:818)
>  ~[hadoop-hdfs-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at sun.reflect.GeneratedMethodAccessor14.invoke(Unknown Source) ~[?:?]
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  ~[?:1.8.0_112]
>       at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
>       at 
> org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:291)
>  [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:203)
>  [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:185)
>  [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at com.sun.proxy.$Proxy31.getFileInfo(Unknown Source) [?:?]
>       at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:2165) 
> [hadoop-hdfs-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.hdfs.DistributedFileSystem$26.doCall(DistributedFileSystem.java:1442)
>  [hadoop-hdfs-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.hdfs.DistributedFileSystem$26.doCall(DistributedFileSystem.java:1438)
>  [hadoop-hdfs-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
>  [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1438)
>  [hadoop-hdfs-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1447) 
> [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.security.alias.JavaKeyStoreProvider.keystoreExists(JavaKeyStoreProvider.java:65)
>  [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider.<init>(AbstractJavaKeyStoreProvider.java:105)
>  [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.security.alias.JavaKeyStoreProvider.<init>(JavaKeyStoreProvider.java:49)
>  [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.security.alias.JavaKeyStoreProvider.<init>(JavaKeyStoreProvider.java:41)
>  [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.security.alias.JavaKeyStoreProvider$Factory.createProvider(JavaKeyStoreProvider.java:100)
>  [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.security.alias.CredentialProviderFactory.getProviders(CredentialProviderFactory.java:61)
>  [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.conf.Configuration.getPasswordFromCredentialProviders(Configuration.java:1992)
>  [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.conf.Configuration.getPassword(Configuration.java:1972) 
> [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.hive.metastore.conf.MetastoreConf.getPassword(MetastoreConf.java:1334)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.ObjectStore.getDataSourceProps(ObjectStore.java:571)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:312) 
> [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76) 
> [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136) 
> [hadoop-common-2.7.3.2.6.1.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:59) 
> [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:677)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:643)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:637)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:544)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at sun.reflect.GeneratedMethodAccessor58.invoke(Unknown Source) ~[?:?]
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  ~[?:1.8.0_112]
>       at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_112]
>       at 
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:147)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:108)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:80)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:93)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:7516)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:169)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:77)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at sun.reflect.GeneratedConstructorAccessor148.newInstance(Unknown 
> Source) [?:1.8.0_112]
>       at 
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>  [?:1.8.0_112]
>       at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 
> [?:1.8.0_112]
>       at 
> org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1445)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:4051) 
> [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:4103) 
> [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:4083) 
> [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.ql.lockmgr.DbTxnManager.getMS(DbTxnManager.java:158) 
> [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.ql.lockmgr.DbTxnManager.heartbeat(DbTxnManager.java:610)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.ql.lockmgr.DbTxnManager$Heartbeater.run(DbTxnManager.java:878)
>  [hive-exec-3.0.0-SNAPSHOT.jar:3.0.0-SNAPSHOT]
>       at 
> java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) 
> [?:1.8.0_112]
>       at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308) 
> [?:1.8.0_112]
>       at 
> java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
>  [?:1.8.0_112]
>       at 
> java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
>  [?:1.8.0_112]
>       at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>  [?:1.8.0_112]
>       at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>  [?:1.8.0_112]
>       at java.lang.Thread.run(Thread.java:745) [?:1.8.0_112]
> 2017-11-17T12:15:52,670 ERROR [Heartbeater-1] metastore.RetryingHMSHandler: 
> java.lang.RuntimeException: Error getting metastore password: Configuration 
> problem with provider path.
>       at 
> org.apache.hadoop.hive.metastore.ObjectStore.getDataSourceProps(ObjectStore.java:577)
>       at 
> org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:312)
>       at 
> org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:76)
>       at 
> org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:136)
>       at 
> org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:59)
>       at 
> org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:67)
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStoreForConf(HiveMetaStore.java:677)
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMSForConf(HiveMetaStore.java:643)
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:637)
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:544)
>       at sun.reflect.GeneratedMethodAccessor58.invoke(Unknown Source)
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:498)
>       at 
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.invokeInternal(RetryingHMSHandler.java:147)
>       at 
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.invoke(RetryingHMSHandler.java:108)
>       at 
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:80)
>       at 
> org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:93)
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:7516)
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:169)
>       at 
> org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:77)
>       at sun.reflect.GeneratedConstructorAccessor148.newInstance(Unknown 
> Source)
>       at 
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>       at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>       at 
> org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1445)
>       at 
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:83)
>       at 
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:133)
>       at 
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
>       at 
> org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:4051)
>       at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:4103)
>       at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:4083)
>       at 
> org.apache.hadoop.hive.ql.lockmgr.DbTxnManager.getMS(DbTxnManager.java:158)
>       at 
> org.apache.hadoop.hive.ql.lockmgr.DbTxnManager.heartbeat(DbTxnManager.java:610)
>       at 
> org.apache.hadoop.hive.ql.lockmgr.DbTxnManager$Heartbeater.run(DbTxnManager.java:878)
>       at 
> java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
>       at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
>       at 
> java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
>       at 
> java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
>       at 
> java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
>       at 
> java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
>       at java.lang.Thread.run(Thread.java:745)
> Caused by: java.io.IOException: Configuration problem with provider path.
>       at 
> org.apache.hadoop.conf.Configuration.getPasswordFromCredentialProviders(Configuration.java:2012)
>       at 
> org.apache.hadoop.conf.Configuration.getPassword(Configuration.java:1972)
>       at 
> org.apache.hadoop.hive.metastore.conf.MetastoreConf.getPassword(MetastoreConf.java:1334)
>       at 
> org.apache.hadoop.hive.metastore.ObjectStore.getDataSourceProps(ObjectStore.java:571)
>       ... 39 more
> Caused by: org.apache.hadoop.security.AccessControlException: Permission 
> denied: user=test, access=EXECUTE, 
> inode="/user/another/another.jceks":another:another:drwx------
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:319)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:259)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:205)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:190)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1955)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getFileInfo(FSDirStatAndListingOp.java:109)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getFileInfo(FSNamesystem.java:4111)
>       at 
> org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:1137)
>       at 
> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:866)
>       at 
> org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>       at 
> org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:640)
>       at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
>       at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2351)
>       at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2347)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at javax.security.auth.Subject.doAs(Subject.java:422)
>       at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
>       at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2345)
>       at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>       at 
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>       at 
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>       at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>       at 
> org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
>       at 
> org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:73)
>       at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:2167)
>       at 
> org.apache.hadoop.hdfs.DistributedFileSystem$26.doCall(DistributedFileSystem.java:1442)
>       at 
> org.apache.hadoop.hdfs.DistributedFileSystem$26.doCall(DistributedFileSystem.java:1438)
>       at 
> org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
>       at 
> org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1438)
>       at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1447)
>       at 
> org.apache.hadoop.security.alias.JavaKeyStoreProvider.keystoreExists(JavaKeyStoreProvider.java:65)
>       at 
> org.apache.hadoop.security.alias.AbstractJavaKeyStoreProvider.<init>(AbstractJavaKeyStoreProvider.java:105)
>       at 
> org.apache.hadoop.security.alias.JavaKeyStoreProvider.<init>(JavaKeyStoreProvider.java:49)
>       at 
> org.apache.hadoop.security.alias.JavaKeyStoreProvider.<init>(JavaKeyStoreProvider.java:41)
>       at 
> org.apache.hadoop.security.alias.JavaKeyStoreProvider$Factory.createProvider(JavaKeyStoreProvider.java:100)
>       at 
> org.apache.hadoop.security.alias.CredentialProviderFactory.getProviders(CredentialProviderFactory.java:61)
>       at 
> org.apache.hadoop.conf.Configuration.getPasswordFromCredentialProviders(Configuration.java:1992)
>       ... 42 more
> Caused by: 
> org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.security.AccessControlException):
>  Permission denied: user=test, access=EXECUTE, 
> inode="/user/another/another.jceks":another:another:drwx------
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.check(FSPermissionChecker.java:319)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkTraverse(FSPermissionChecker.java:259)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:205)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:190)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:1955)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.getFileInfo(FSDirStatAndListingOp.java:109)
>       at 
> org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getFileInfo(FSNamesystem.java:4111)
>       at 
> org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:1137)
>       at 
> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:866)
>       at 
> org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>       at 
> org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:640)
>       at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
>       at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2351)
>       at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2347)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at javax.security.auth.Subject.doAs(Subject.java:422)
>       at 
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1866)
>       at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2345)
>       at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1554)
>       at org.apache.hadoop.ipc.Client.call(Client.java:1498)
>       at org.apache.hadoop.ipc.Client.call(Client.java:1398)
>       at 
> org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:233)
>       at com.sun.proxy.$Proxy30.getFileInfo(Unknown Source)
>       at 
> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:818)
>       at sun.reflect.GeneratedMethodAccessor14.invoke(Unknown Source)
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>       at java.lang.reflect.Method.invoke(Method.java:498)
>       at 
> org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:291)
>       at 
> org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:203)
>       at 
> org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:185)
>       at com.sun.proxy.$Proxy31.getFileInfo(Unknown Source)
>       at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:2165)
>       ... 54 more
> {code}
> The above only reproduces when the _insert overwrite_ query runs longer than 
> _hive.txn.timeout / 2 = 4s / 2 = 2 seconds_, i.e. long enough for the heartbeater 
> to fire.
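> From the stack trace, the heartbeater thread, running as user test, apparently ends 
> up resolving the metastore password through the other session's credential provider 
> path (/user/another/another.jceks) and is rejected by HDFS. The permission failure 
> itself can be seen outside Hive (a minimal sketch, reusing the users and paths above):
> {code}
> # run as OS user 'test'
> hdfs dfs -cat /user/another/another.jceks
> # fails with a Permission denied error analogous to the one in the log above
> {code}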



--
This message was sent by Atlassian JIRA
(v6.4.14#64029)
