This is an automated email from the ASF dual-hosted git repository.

singhpk234 pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/iceberg.git


The following commit(s) were added to refs/heads/main by this push:
     new ea8eb86008 Core, Flink: Use helper method to filter by prefix (#14610)
ea8eb86008 is described below

commit ea8eb86008c8bbc1b957e354e53c1011bfd0a50a
Author: Yuya Ebihara <[email protected]>
AuthorDate: Tue Nov 18 12:02:10 2025 +0900

    Core, Flink: Use helper method to filter by prefix (#14610)
---
 .../main/java/org/apache/iceberg/hadoop/HadoopTables.java   | 13 +------------
 core/src/main/java/org/apache/iceberg/jdbc/JdbcUtil.java    |  9 ++-------
 core/src/main/java/org/apache/iceberg/rest/RESTUtil.java    | 12 ++----------
 .../flink/maintenance/api/RewriteDataFilesConfig.java       | 12 ++----------
 4 files changed, 7 insertions(+), 39 deletions(-)

diff --git a/core/src/main/java/org/apache/iceberg/hadoop/HadoopTables.java b/core/src/main/java/org/apache/iceberg/hadoop/HadoopTables.java
index 764d0d7d86..8d980b7ba1 100644
--- a/core/src/main/java/org/apache/iceberg/hadoop/HadoopTables.java
+++ b/core/src/main/java/org/apache/iceberg/hadoop/HadoopTables.java
@@ -20,7 +20,6 @@ package org.apache.iceberg.hadoop;
 
 import java.io.IOException;
 import java.io.UncheckedIOException;
-import java.util.Iterator;
 import java.util.Map;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
@@ -46,7 +45,6 @@ import org.apache.iceberg.exceptions.NoSuchTableException;
 import org.apache.iceberg.relocated.com.google.common.annotations.VisibleForTesting;
 import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
 import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
-import org.apache.iceberg.relocated.com.google.common.collect.Maps;
 import org.apache.iceberg.util.LockManagers;
 import org.apache.iceberg.util.Pair;
 import org.slf4j.Logger;
@@ -215,16 +213,7 @@ public class HadoopTables implements Tables, Configurable {
 
   private static synchronized LockManager createOrGetLockManager(HadoopTables table) {
     if (lockManager == null) {
-      Map<String, String> properties = Maps.newHashMap();
-      Iterator<Map.Entry<String, String>> configEntries = table.conf.iterator();
-      while (configEntries.hasNext()) {
-        Map.Entry<String, String> entry = configEntries.next();
-        String key = entry.getKey();
-        if (key.startsWith(LOCK_PROPERTY_PREFIX)) {
-          properties.put(key.substring(LOCK_PROPERTY_PREFIX.length()), entry.getValue());
-        }
-      }
-
+      Map<String, String> properties = table.conf.getPropsWithPrefix(LOCK_PROPERTY_PREFIX);
       lockManager = LockManagers.from(properties);
     }
 
diff --git a/core/src/main/java/org/apache/iceberg/jdbc/JdbcUtil.java b/core/src/main/java/org/apache/iceberg/jdbc/JdbcUtil.java
index 544e9f39c7..85e59328db 100644
--- a/core/src/main/java/org/apache/iceberg/jdbc/JdbcUtil.java
+++ b/core/src/main/java/org/apache/iceberg/jdbc/JdbcUtil.java
@@ -33,6 +33,7 @@ import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
 import org.apache.iceberg.relocated.com.google.common.base.Splitter;
 import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
 import org.apache.iceberg.relocated.com.google.common.collect.Maps;
+import org.apache.iceberg.util.PropertyUtil;
 
 final class JdbcUtil {
   // property to control strict-mode (aka check if namespace exists when creating a table)
@@ -517,13 +518,7 @@ final class JdbcUtil {
 
   static Properties filterAndRemovePrefix(Map<String, String> properties, String prefix) {
     Properties result = new Properties();
-    properties.forEach(
-        (key, value) -> {
-          if (key.startsWith(prefix)) {
-            result.put(key.substring(prefix.length()), value);
-          }
-        });
-
+    result.putAll(PropertyUtil.propertiesWithPrefix(properties, prefix));
     return result;
   }
 
diff --git a/core/src/main/java/org/apache/iceberg/rest/RESTUtil.java b/core/src/main/java/org/apache/iceberg/rest/RESTUtil.java
index d31260a918..6ca04e74d9 100644
--- a/core/src/main/java/org/apache/iceberg/rest/RESTUtil.java
+++ b/core/src/main/java/org/apache/iceberg/rest/RESTUtil.java
@@ -28,7 +28,7 @@ import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
 import org.apache.iceberg.relocated.com.google.common.base.Splitter;
 import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
 import org.apache.iceberg.relocated.com.google.common.collect.Iterables;
-import org.apache.iceberg.relocated.com.google.common.collect.Maps;
+import org.apache.iceberg.util.PropertyUtil;
 
 public class RESTUtil {
   private static final char NAMESPACE_SEPARATOR = '\u001f';
@@ -98,15 +98,7 @@ public class RESTUtil {
   public static Map<String, String> extractPrefixMap(
       Map<String, String> properties, String prefix) {
     Preconditions.checkNotNull(properties, "Invalid properties map: null");
-    Map<String, String> result = Maps.newHashMap();
-    properties.forEach(
-        (key, value) -> {
-          if (key != null && key.startsWith(prefix)) {
-            result.put(key.substring(prefix.length()), value);
-          }
-        });
-
-    return result;
+    return PropertyUtil.propertiesWithPrefix(properties, prefix);
   }
 
   private static final Joiner.MapJoiner FORM_JOINER = Joiner.on("&").withKeyValueSeparator("=");
diff --git a/flink/v2.1/flink/src/main/java/org/apache/iceberg/flink/maintenance/api/RewriteDataFilesConfig.java b/flink/v2.1/flink/src/main/java/org/apache/iceberg/flink/maintenance/api/RewriteDataFilesConfig.java
index b2fb83b75b..1a4645cb2b 100644
--- a/flink/v2.1/flink/src/main/java/org/apache/iceberg/flink/maintenance/api/RewriteDataFilesConfig.java
+++ b/flink/v2.1/flink/src/main/java/org/apache/iceberg/flink/maintenance/api/RewriteDataFilesConfig.java
@@ -19,14 +19,13 @@
 package org.apache.iceberg.flink.maintenance.api;
 
 import java.util.Map;
-import java.util.stream.Collectors;
 import org.apache.flink.configuration.ConfigOption;
 import org.apache.flink.configuration.ConfigOptions;
 import org.apache.flink.configuration.ReadableConfig;
 import org.apache.iceberg.Table;
 import org.apache.iceberg.actions.RewriteDataFiles;
 import org.apache.iceberg.flink.FlinkConfParser;
-import org.apache.iceberg.relocated.com.google.common.collect.Maps;
+import org.apache.iceberg.util.PropertyUtil;
 
 public class RewriteDataFilesConfig {
   public static final String PREFIX = FlinkMaintenanceConfig.PREFIX + "rewrite.";
@@ -172,13 +171,6 @@ public class RewriteDataFilesConfig {
   }
 
   public Map<String, String> properties() {
-    return writeProperties.entrySet().stream()
-        .filter(entry -> entry.getKey().startsWith(PREFIX))
-        .collect(
-            Collectors.toMap(
-                entry -> entry.getKey().substring(PREFIX.length()),
-                Map.Entry::getValue,
-                (existing, replacement) -> existing,
-                Maps::newHashMap));
+    return PropertyUtil.propertiesWithPrefix(writeProperties, PREFIX);
   }
 }

Reply via email to