[ https://issues.apache.org/jira/browse/HIVE-20914?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16717854#comment-16717854 ]

Hive QA commented on HIVE-20914:
--------------------------------



Here are the results of testing the latest attachment:
https://issues.apache.org/jira/secure/attachment/12951390/HIVE-20914.8.patch

{color:green}SUCCESS:{color} +1 due to 1 test(s) being added or modified.

{color:red}ERROR:{color} -1 due to 45 failed/errored test(s), 15661 tests executed
*Failed tests:*
{noformat}
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testAlterPartition (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testAlterTable (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testAlterTableCascade (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testAlterViewParititon (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testColumnStatistics (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testComplexTable (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testComplexTypeApi (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testConcurrentMetastores (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testCreateAndGetTableWithDriver (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testCreateTableSettingId (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testDBLocationChange (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testDBOwner (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testDBOwnerChange (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testDatabase (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testDatabaseLocation (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testDatabaseLocationWithPermissionProblems (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testDropDatabaseCascadeMVMultiDB (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testDropTable (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testFilterLastPartition (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testFilterSinglePartition (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testFunctionWithResources (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testGetConfigValue (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testGetMetastoreUuid (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testGetPartitionsWithSpec (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testGetSchemaWithNoClassDefFoundError (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testGetTableObjects (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testGetUUIDInParallel (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testJDOPersistanceManagerCleanup (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testListPartitionNames (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testListPartitions (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testListPartitionsWihtLimitEnabled (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testNameMethods (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testPartition (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testPartitionFilter (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testRenamePartition (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testRetriableClientWithConnLifetime (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testSimpleFunction (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testSimpleTable (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testSimpleTypeApi (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testStatsFastTrivial (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testSynchronized (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testTableDatabase (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testTableFilter (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testUpdatePartitionStat_doesNotUpdateStats (batchId=229)
org.apache.hadoop.hive.metastore.TestRemoteHiveMetaStoreZKBindHost.testValidateTableCols (batchId=229)
{noformat}

Test results: https://builds.apache.org/job/PreCommit-HIVE-Build/15262/testReport
Console output: https://builds.apache.org/job/PreCommit-HIVE-Build/15262/console
Test logs: http://104.198.109.242/logs/PreCommit-HIVE-Build-15262/

Messages:
{noformat}
Executing org.apache.hive.ptest.execution.TestCheckPhase
Executing org.apache.hive.ptest.execution.PrepPhase
Executing org.apache.hive.ptest.execution.YetusPhase
Executing org.apache.hive.ptest.execution.ExecutionPhase
Executing org.apache.hive.ptest.execution.ReportingPhase
Tests exited with: TestsFailedException: 45 tests failed
{noformat}

This message is automatically generated.

ATTACHMENT ID: 12951390 - PreCommit-HIVE-Build

> MRScratchDir permission denied when "hive.server2.enable.doAs", "hive.exec.submitviachild" are set to "true" and impersonated/proxy user is used
> ------------------------------------------------------------------------------------------------------------------------------------------------
>
>                 Key: HIVE-20914
>                 URL: https://issues.apache.org/jira/browse/HIVE-20914
>             Project: Hive
>          Issue Type: Bug
>          Components: HiveServer2
>            Reporter: Denys Kuzmenko
>            Assignee: Denys Kuzmenko
>            Priority: Major
>         Attachments: HIVE-20914.1.patch, HIVE-20914.2.patch, 
> HIVE-20914.3.patch, HIVE-20914.4.patch, HIVE-20914.5.patch, 
> HIVE-20914.6.patch, HIVE-20914.7.patch, HIVE-20914.8.patch
>
>
> The above issue can be reproduced on a non-Kerberos cluster using the steps below:
> 1. Set "hive.exec.submitviachild" to "true".
> 2. Run a count query as a user other than "hive":
> {code}beeline -u 'jdbc:hive2://localhost:10000' -n hdfs{code}
> There is no issue when the same query is executed as the "hive" user.
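> For context, a minimal repro sketch; it assumes the two server-side properties from the issue title are already set in hive-site.xml, and "sample_table" is a hypothetical table name:
> {code}
> # Assumed server-side hive-site.xml settings (from the issue title), applied before
> # HiveServer2 starts:
> #   hive.server2.enable.doAs=true
> #   hive.exec.submitviachild=true
> # Connect as a user other than "hive" (here: hdfs) and run a simple count query.
> beeline -u 'jdbc:hive2://localhost:10000' -n hdfs \
>   -e 'SELECT COUNT(*) FROM sample_table;'
> {code}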
> {code:java}
> Exception in thread "main" java.lang.RuntimeException: org.apache.hadoop.security.AccessControlException: Permission denied: user=hive, access=EXECUTE, inode="/tmp/hive/hdfs":hdfs:supergroup:drwx------
>  at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkFsPermission(DefaultAuthorizationProvider.java:279)
>  at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.check(DefaultAuthorizationProvider.java:260)
>  at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkTraverse(DefaultAuthorizationProvider.java:201)
>  at org.apache.hadoop.hdfs.server.namenode.DefaultAuthorizationProvider.checkPermission(DefaultAuthorizationProvider.java:154)
>  at org.apache.hadoop.hdfs.server.namenode.FSPermissionChecker.checkPermission(FSPermissionChecker.java:152)
>  at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:3877)
>  at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkPermission(FSDirectory.java:3860)
>  at org.apache.hadoop.hdfs.server.namenode.FSDirectory.checkTraverse(FSDirectory.java:3847)
>  at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkTraverse(FSNamesystem.java:6822)
>  at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInternal(FSNamesystem.java:4551)
>  at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirsInt(FSNamesystem.java:4529)
>  at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:4502)
>  at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:884)
>  at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.mkdirs(AuthorizationProviderProxyClientProtocol.java:328)
>  at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:641)
>  at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
>  at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
>  at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
>  at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2281)
>  at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2277)
>  at java.security.AccessController.doPrivileged(Native Method)
>  at javax.security.auth.Subject.doAs(Subject.java:422)
>  at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1920)
>  at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2275)
>  at org.apache.hadoop.hive.ql.Context.getScratchDir(Context.java:285)
>  at org.apache.hadoop.hive.ql.Context.getMRScratchDir(Context.java:328)
>  at org.apache.hadoop.hive.ql.Context.getMRTmpPath(Context.java:444)
>  at org.apache.hadoop.hive.ql.exec.mr.ExecDriver.execute(ExecDriver.java:243)
>  at org.apache.hadoop.hive.ql.exec.mr.ExecDriver.main(ExecDriver.java:771)
>  at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>  at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
>  at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  at java.lang.reflect.Method.invoke(Method.java:498)
>  at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
>  at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
> {code}



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
