This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git


The following commit(s) were added to refs/heads/master by this push:
     new f86f86ba9e [core] Support Kerberos authentication (#6059)
f86f86ba9e is described below

commit f86f86ba9e57a67e47aab096b43cd3b09958628d
Author: yunfengzhou-hub <[email protected]>
AuthorDate: Wed Aug 13 16:50:46 2025 +0800

    [core] Support Kerberos authentication (#6059)
---
 docs/content/maintenance/filesystems.md            |  11 +-
 .../org/apache/paimon/fs/hadoop/HadoopFileIO.java  |  11 +-
 .../paimon/fs/hadoop/HadoopSecuredFileSystem.java  | 170 +++++++++++++++++++++
 .../fs/hadoop/HadoopSecuredFileSystemTest.java     |  49 ++++++
 4 files changed, 238 insertions(+), 3 deletions(-)

diff --git a/docs/content/maintenance/filesystems.md 
b/docs/content/maintenance/filesystems.md
index 960aea6ec6..93ce7bbb0c 100644
--- a/docs/content/maintenance/filesystems.md
+++ b/docs/content/maintenance/filesystems.md
@@ -125,7 +125,16 @@ For Alluxio support add the following entry into the 
core-site.xml file:
 
 {{< tab "Flink" >}}
 
-It is recommended to use [Flink Kerberos 
Keytab](https://nightlies.apache.org/flink/flink-docs-stable/docs/deployment/security/security-kerberos/).
+Configure the following options in your catalog configuration:
+
+- security.kerberos.login.keytab: Absolute path to a Kerberos keytab file that 
contains the user credentials.
+  Please make sure it is copied to each machine.
+- security.kerberos.login.principal: Kerberos principal name associated with 
the keytab.
+
+And configure the following option in the program's Java system properties:
+
+- java.security.krb5.conf: Absolute path to the Kerberos configuration file.
+  Please make sure it is copied to each machine.
 
 {{< /tab >}}
 
diff --git 
a/paimon-common/src/main/java/org/apache/paimon/fs/hadoop/HadoopFileIO.java 
b/paimon-common/src/main/java/org/apache/paimon/fs/hadoop/HadoopFileIO.java
index 5c5acb7259..918ef5f5c4 100644
--- a/paimon-common/src/main/java/org/apache/paimon/fs/hadoop/HadoopFileIO.java
+++ b/paimon-common/src/main/java/org/apache/paimon/fs/hadoop/HadoopFileIO.java
@@ -54,6 +54,8 @@ public class HadoopFileIO implements FileIO {
 
     protected SerializableConfiguration hadoopConf;
 
+    private org.apache.paimon.options.Options options;
+
     protected transient volatile Map<Pair<String, String>, FileSystem> fsMap;
 
     @VisibleForTesting
@@ -70,6 +72,7 @@ public class HadoopFileIO implements FileIO {
     @Override
     public void configure(CatalogContext context) {
         this.hadoopConf = new SerializableConfiguration(context.hadoopConf());
+        this.options = context.options();
     }
 
     public Configuration hadoopConf() {
@@ -167,7 +170,8 @@ public class HadoopFileIO implements FileIO {
         return new org.apache.hadoop.fs.Path(path.toUri());
     }
 
-    private FileSystem getFileSystem(org.apache.hadoop.fs.Path path) throws 
IOException {
+    @VisibleForTesting
+    FileSystem getFileSystem(org.apache.hadoop.fs.Path path) throws 
IOException {
         return getFileSystem(path, this::createFileSystem);
     }
 
@@ -198,7 +202,10 @@ public class HadoopFileIO implements FileIO {
     }
 
     protected FileSystem createFileSystem(org.apache.hadoop.fs.Path path) 
throws IOException {
-        return path.getFileSystem(hadoopConf.get());
+        Configuration conf = hadoopConf.get();
+        FileSystem fileSystem = path.getFileSystem(conf);
+        fileSystem = HadoopSecuredFileSystem.trySecureFileSystem(fileSystem, 
options, conf);
+        return fileSystem;
     }
 
     private static class HadoopSeekableInputStream extends SeekableInputStream 
{
diff --git 
a/paimon-common/src/main/java/org/apache/paimon/fs/hadoop/HadoopSecuredFileSystem.java
 
b/paimon-common/src/main/java/org/apache/paimon/fs/hadoop/HadoopSecuredFileSystem.java
new file mode 100644
index 0000000000..e74bffbf68
--- /dev/null
+++ 
b/paimon-common/src/main/java/org/apache/paimon/fs/hadoop/HadoopSecuredFileSystem.java
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.paimon.fs.hadoop;
+
+import org.apache.paimon.options.Options;
+import org.apache.paimon.security.HadoopModule;
+import org.apache.paimon.security.SecurityConfiguration;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.Progressable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.URI;
+import java.security.PrivilegedExceptionAction;
+import java.util.concurrent.Callable;
+
+/** {@link FileSystem} with a hadoop login user installed through {@link 
HadoopModule}. */
+public class HadoopSecuredFileSystem extends FileSystem {
+    private static final Logger LOG = 
LoggerFactory.getLogger(HadoopSecuredFileSystem.class);
+
+    private final FileSystem fileSystem;
+    private final UserGroupInformation ugi;
+
+    private HadoopSecuredFileSystem(FileSystem fileSystem, 
UserGroupInformation ugi) {
+        this.fileSystem = fileSystem;
+        this.ugi = ugi;
+    }
+
+    @Override
+    public Configuration getConf() {
+        return fileSystem.getConf();
+    }
+
+    @Override
+    public URI getUri() {
+        return runSecured(fileSystem::getUri);
+    }
+
+    @Override
+    public FSDataInputStream open(Path path, int i) throws IOException {
+        return runSecuredWithIOException(() -> fileSystem.open(path, i));
+    }
+
+    @Override
+    public FSDataOutputStream create(
+            Path path,
+            FsPermission fsPermission,
+            boolean b,
+            int i,
+            short i1,
+            long l,
+            Progressable progressable)
+            throws IOException {
+        return runSecuredWithIOException(
+                () -> fileSystem.create(path, fsPermission, b, i, i1, l, 
progressable));
+    }
+
+    @Override
+    public boolean exists(Path f) throws IOException {
+        return runSecuredWithIOException(() -> fileSystem.exists(f));
+    }
+
+    @Override
+    public FSDataOutputStream append(Path path, int i, Progressable 
progressable)
+            throws IOException {
+        return runSecuredWithIOException(() -> fileSystem.append(path, i, 
progressable));
+    }
+
+    @Override
+    public boolean rename(Path path, Path path1) throws IOException {
+        return runSecuredWithIOException(() -> fileSystem.rename(path, path1));
+    }
+
+    @Override
+    public boolean delete(Path path, boolean b) throws IOException {
+        return runSecuredWithIOException(() -> fileSystem.delete(path, b));
+    }
+
+    @Override
+    public FileStatus[] listStatus(Path path) throws IOException {
+        return runSecuredWithIOException(() -> fileSystem.listStatus(path));
+    }
+
+    @Override
+    public void setWorkingDirectory(Path path) {
+        runSecured(() -> fileSystem.setWorkingDirectory(path));
+    }
+
+    @Override
+    public Path getWorkingDirectory() {
+        return runSecured(fileSystem::getWorkingDirectory);
+    }
+
+    @Override
+    public boolean mkdirs(Path path, FsPermission fsPermission) throws 
IOException {
+        return runSecuredWithIOException(() -> fileSystem.mkdirs(path, 
fsPermission));
+    }
+
+    @Override
+    public FileStatus getFileStatus(Path path) throws IOException {
+        return runSecuredWithIOException(() -> fileSystem.getFileStatus(path));
+    }
+
+    private void runSecured(final Runnable securedRunnable) {
+        runSecured(
+                () -> {
+                    securedRunnable.run();
+                    return null;
+                });
+    }
+
+    private <T> T runSecured(final Callable<T> securedCallable) {
+        try {
+            return ugi.doAs((PrivilegedExceptionAction<T>) 
securedCallable::call);
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    private <T> T runSecuredWithIOException(final Callable<T> securedCallable) 
throws IOException {
+        try {
+            return ugi.doAs((PrivilegedExceptionAction<T>) 
securedCallable::call);
+        } catch (IOException e) {
+            throw e;
+        } catch (Exception e) {
+            throw new RuntimeException(e);
+        }
+    }
+
+    public static FileSystem trySecureFileSystem(
+            FileSystem fileSystem, Options options, Configuration 
configuration)
+            throws IOException {
+        SecurityConfiguration config = new SecurityConfiguration(options);
+        if (config.isLegal()) {
+            LOG.info("Hadoop security configuration is legal, use the secured 
FileSystem.");
+            HadoopModule module = new HadoopModule(config, configuration);
+            module.install();
+            UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+            return new HadoopSecuredFileSystem(fileSystem, ugi);
+        } else {
+            LOG.info("Hadoop security configuration is illegal, use the 
original FileSystem.");
+            return fileSystem;
+        }
+    }
+}
diff --git 
a/paimon-common/src/test/java/org/apache/paimon/fs/hadoop/HadoopSecuredFileSystemTest.java
 
b/paimon-common/src/test/java/org/apache/paimon/fs/hadoop/HadoopSecuredFileSystemTest.java
new file mode 100644
index 0000000000..1737434c8a
--- /dev/null
+++ 
b/paimon-common/src/test/java/org/apache/paimon/fs/hadoop/HadoopSecuredFileSystemTest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.paimon.fs.hadoop;
+
+import org.apache.paimon.catalog.CatalogContext;
+import org.apache.paimon.options.Options;
+
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.io.TempDir;
+
+import java.io.File;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+/** Test for {@link HadoopSecuredFileSystem}. */
+public class HadoopSecuredFileSystemTest {
+    @TempDir private java.nio.file.Path tmp;
+
+    @Test
+    public void test() throws Exception {
+        File keytabFile = new File(tmp.toFile(), "test-keytab.keytab");
+        assertThat(keytabFile.createNewFile()).isTrue();
+
+        Options options = new Options();
+        options.set("security.kerberos.login.principal", "test-user");
+        options.set("security.kerberos.login.keytab", 
keytabFile.getAbsolutePath());
+
+        HadoopFileIO fileIO = new HadoopFileIO();
+        fileIO.configure(CatalogContext.create(options));
+        assertThat(fileIO.getFileSystem(new 
org.apache.hadoop.fs.Path("file:///tmp/test")))
+                .isInstanceOf(HadoopSecuredFileSystem.class);
+    }
+}

Reply via email to