[
https://issues.apache.org/jira/browse/HBASE-29482?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
]
Junegunn Choi resolved HBASE-29482.
-----------------------------------
Fix Version/s: 2.7.0
3.0.0-beta-2
2.6.4
2.5.13
Resolution: Fixed
> Bulkload fails with viewfs authentication error
> -----------------------------------------------
>
> Key: HBASE-29482
> URL: https://issues.apache.org/jira/browse/HBASE-29482
> Project: HBase
> Issue Type: Bug
> Reporter: Jaehui Lee
> Assignee: Jaehui Lee
> Priority: Minor
> Labels: pull-request-available
> Fix For: 2.7.0, 3.0.0-beta-2, 2.6.4, 2.5.13
>
>
> When performing a bulkload operation where the HFiles are located on a remote
> federated HDFS and accessed via ViewFS paths, an authentication error occurs.
> {code:java}
> 2025-07-22T17:38:45,248 WARN
> [RpcServer.default.FPBQ.Fifo.handler=26,queue=2,port=16020] ipc.Client:
> Exception encountered while connecting to the server {server}
> org.apache.hadoop.security.AccessControlException: Client cannot authenticate
> via:[TOKEN, KERBEROS]
> at
> org.apache.hadoop.security.SaslRpcClient.selectSaslClient(SaslRpcClient.java:179)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.security.SaslRpcClient.saslConnect(SaslRpcClient.java:399)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.ipc.Client$Connection.setupSaslConnection(Client.java:578)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.ipc.Client$Connection.access$2100(Client.java:364)
> ~[hadoop-common-3.4.1.jar:?]
> at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:799)
> ~[hadoop-common-3.4.1.jar:?]
> at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:795)
> ~[hadoop-common-3.4.1.jar:?]
> at
> java.security.AccessController.doPrivileged(AccessController.java:714) ~[?:?]
> at javax.security.auth.Subject.doAs(Subject.java:525) ~[?:?]
> at
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1953)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:795)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.ipc.Client$Connection.access$3800(Client.java:364)
> ~[hadoop-common-3.4.1.jar:?]
> at org.apache.hadoop.ipc.Client.getConnection(Client.java:1649)
> ~[hadoop-common-3.4.1.jar:?]
> at org.apache.hadoop.ipc.Client.call(Client.java:1473)
> ~[hadoop-common-3.4.1.jar:?]
> at org.apache.hadoop.ipc.Client.call(Client.java:1426)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:258)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.ipc.ProtobufRpcEngine2$Invoker.invoke(ProtobufRpcEngine2.java:139)
> ~[hadoop-common-3.4.1.jar:?]
> at jdk.proxy2.$Proxy32.checkAccess(Unknown Source) ~[?:?]
> at
> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.lambda$checkAccess$93(ClientNamenodeProtocolTranslatorPB.java:1464)
> ~[hadoop-hdfs-client-3.4.1.jar:?]
> at
> org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc(ShadedProtobufHelper.java:160)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.checkAccess(ClientNamenodeProtocolTranslatorPB.java:1464)
> ~[hadoop-hdfs-client-3.4.1.jar:?]
> at
> jdk.internal.reflect.DirectMethodHandleAccessor.invoke(DirectMethodHandleAccessor.java:103)
> ~[?:?]
> at java.lang.reflect.Method.invoke(Method.java:580) ~[?:?]
> at
> org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:437)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:170)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:162)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:100)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:366)
> ~[hadoop-common-3.4.1.jar:?]
> at jdk.proxy2.$Proxy33.checkAccess(Unknown Source) ~[?:?]
> at org.apache.hadoop.hdfs.DFSClient.checkAccess(DFSClient.java:2987)
> ~[hadoop-hdfs-client-3.4.1.jar:?]
> at
> org.apache.hadoop.hdfs.DistributedFileSystem$66.doCall(DistributedFileSystem.java:3247)
> ~[hadoop-hdfs-client-3.4.1.jar:?]
> at
> org.apache.hadoop.hdfs.DistributedFileSystem$66.doCall(DistributedFileSystem.java:3244)
> ~[hadoop-hdfs-client-3.4.1.jar:?]
> at
> org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.hdfs.DistributedFileSystem.access(DistributedFileSystem.java:3257)
> ~[hadoop-hdfs-client-3.4.1.jar:?]
> at
> org.apache.hadoop.fs.FilterFileSystem.access(FilterFileSystem.java:473)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.fs.viewfs.ChRootedFileSystem.access(ChRootedFileSystem.java:256)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.fs.viewfs.ViewFileSystem.access(ViewFileSystem.java:581)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.hbase.regionserver.HStore.assertBulkLoadHFileOk(HStore.java:607)
> ~[hbase-server-2.5.11-hadoop3.jar:2.5.11-hadoop3]
> at
> org.apache.hadoop.hbase.regionserver.HRegion.bulkLoadHFiles(HRegion.java:6737)
> ~[hbase-server-2.5.11-hadoop3.jar:2.5.11-hadoop3]
> at
> org.apache.hadoop.hbase.regionserver.SecureBulkLoadManager$1.run(SecureBulkLoadManager.java:288)
> ~[hbase-server-2.5.11-hadoop3.jar:2.5.11-hadoop3]
> at
> org.apache.hadoop.hbase.regionserver.SecureBulkLoadManager$1.run(SecureBulkLoadManager.java:263)
> ~[hbase-server-2.5.11-hadoop3.jar:2.5.11-hadoop3]
> at
> java.security.AccessController.doPrivileged(AccessController.java:400) ~[?:?]
> at javax.security.auth.Subject.doAs(Subject.java:453) ~[?:?]
> at
> org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1930)
> ~[hadoop-common-3.4.1.jar:?]
> at
> org.apache.hadoop.hbase.regionserver.SecureBulkLoadManager.secureBulkLoadHFiles(SecureBulkLoadManager.java:263)
> ~[hbase-server-2.5.11-hadoop3.jar:2.5.11-hadoop3]
> at
> org.apache.hadoop.hbase.regionserver.RSRpcServices.bulkLoadHFile(RSRpcServices.java:2443)
> ~[hbase-server-2.5.11-hadoop3.jar:2.5.11-hadoop3]
> at
> org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos$ClientService$2.callBlockingMethod(ClientProtos.java:45008)
> ~[hbase-protocol-shaded-2.5.11-hadoop3.jar:2.5.11-hadoop3]
> at org.apache.hadoop.hbase.ipc.RpcServer.call(RpcServer.java:415)
> ~[hbase-server-2.5.11-hadoop3.jar:2.5.11-hadoop3]
> at org.apache.hadoop.hbase.ipc.CallRunner.run(CallRunner.java:124)
> ~[hbase-server-2.5.11-hadoop3.jar:2.5.11-hadoop3]
> at org.apache.hadoop.hbase.ipc.RpcHandler.run(RpcHandler.java:102)
> ~[hbase-server-2.5.11-hadoop3.jar:2.5.11-hadoop3]
> at org.apache.hadoop.hbase.ipc.RpcHandler.run(RpcHandler.java:82)
> ~[hbase-server-2.5.11-hadoop3.jar:2.5.11-hadoop3] {code}
> This happens because ViewFileSystem cannot properly generate delegation
> tokens for a federated HDFS cluster accessed through ViewFS paths.
> Without the delegation token, authentication fails when [checking READ_WRITE
> permissions on the path where HFiles are
> stored|https://github.com/apache/hbase/blob/a699d3032887cec9c39868dd93aa7ba84a9b8e03/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStore.java#L619].
> The simplest solution is to use the resolved HDFS paths instead of ViewFS
> paths when performing the bulkload.
--
This message was sent by Atlassian Jira
(v8.20.10#820010)