This is an automated email from the ASF dual-hosted git repository.
dkuzmenko pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new c7694fdd9b0 Revert "HIVE-29035: Fixing cache handling for REST catalog (#5882)"
c7694fdd9b0 is described below
commit c7694fdd9b0e75a978e4e1f334f878c77f8ac7d4
Author: Denys Kuzmenko <[email protected]>
AuthorDate: Mon Aug 11 11:35:35 2025 +0300
Revert "HIVE-29035: Fixing cache handling for REST catalog (#5882)"
This reverts commit 9234ddd400e709b650023bbf48d2f7b67dffd8f1.
---
.../java/org/apache/iceberg/hive/HiveCatalog.java | 36 ++----------
.../hadoop/hive/metastore/conf/MetastoreConf.java | 2 +-
.../org/apache/iceberg/rest/HMSCachingCatalog.java | 65 ++--------------------
.../org/apache/iceberg/rest/HMSCatalogFactory.java | 15 ++---
.../hadoop/hive/metastore/HiveMetaStore.java | 1 -
.../hadoop/hive/metastore/ServletSecurity.java | 10 ----
6 files changed, 18 insertions(+), 111 deletions(-)
diff --git a/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/HiveCatalog.java b/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/HiveCatalog.java
index ce30a647c74..72280449ad5 100644
--- a/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/HiveCatalog.java
+++ b/iceberg/iceberg-catalog/src/main/java/org/apache/iceberg/hive/HiveCatalog.java
@@ -21,7 +21,6 @@
import java.util.List;
import java.util.Map;
-import java.util.Objects;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.hadoop.conf.Configurable;
@@ -32,7 +31,6 @@
import org.apache.hadoop.hive.metastore.TableType;
import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
import org.apache.hadoop.hive.metastore.api.Database;
-import org.apache.hadoop.hive.metastore.api.GetTableRequest;
import org.apache.hadoop.hive.metastore.api.InvalidOperationException;
import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
import org.apache.hadoop.hive.metastore.api.PrincipalType;
@@ -410,46 +408,23 @@ private void validateTableIsIcebergTableOrView(
*/
@Override
public boolean tableExists(TableIdentifier identifier) {
- return Objects.nonNull(fetchTable(identifier));
- }
-
- /**
- * Check whether table or metadata table exists and return its location.
- *
- * <p>Note: If a hive table with the same identifier exists in catalog, this method will return
- * {@code null}.
- *
- * @param identifier a table identifier
- * @return the location of the table if it exists, null otherwise
- */
- public String getTableLocation(TableIdentifier identifier) {
- Table table = fetchTable(identifier);
- if (table == null) {
- return null;
- }
- return table.getParameters().get(BaseMetastoreTableOperations.METADATA_LOCATION_PROP);
- }
-
- private Table fetchTable(TableIdentifier identifier) {
TableIdentifier baseTableIdentifier = identifier;
if (!isValidIdentifier(identifier)) {
if (!isValidMetadataIdentifier(identifier)) {
- return null;
+ return false;
} else {
baseTableIdentifier = TableIdentifier.of(identifier.namespace().levels());
}
}
+
String database = baseTableIdentifier.namespace().level(0);
String tableName = baseTableIdentifier.name();
try {
- GetTableRequest request = new GetTableRequest();
- request.setDbName(database);
- request.setTblName(tableName);
- Table table = clients.run(client -> client.getTable(request));
+ Table table = clients.run(client -> client.getTable(database, tableName));
HiveOperationsBase.validateTableIsIceberg(table, fullTableName(name, baseTableIdentifier));
- return table;
+ return true;
} catch (NoSuchTableException | NoSuchObjectException e) {
- return null;
+ return false;
} catch (TException e) {
throw new RuntimeException("Failed to check table existence of " +
baseTableIdentifier, e);
} catch (InterruptedException e) {
@@ -459,7 +434,6 @@ private Table fetchTable(TableIdentifier identifier) {
}
}
-
@Override
public boolean viewExists(TableIdentifier viewIdentifier) {
if (!isValidIdentifier(viewIdentifier)) {
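
For context on the hunk above: the revert drops the getTableLocation helper and the GetTableRequest-based lookup, so tableExists is again a plain boolean check backed by the two-argument client.getTable(database, tableName) call shown in the '+' line. A minimal caller-side sketch of the restored behavior, assuming a reachable metastore and a hypothetical db.tbl identifier (illustrative only, not part of the patch):

import java.util.Collections;
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.catalog.TableIdentifier;
import org.apache.iceberg.hive.HiveCatalog;

public class TableExistsCheck {
  public static void main(String[] args) {
    // Assumes hive-site.xml / metastore connection settings are on the classpath.
    HiveCatalog catalog = new HiveCatalog();
    catalog.setConf(new Configuration());
    catalog.initialize("hive", Collections.emptyMap());

    // Post-revert there is only the boolean answer; the metadata location of an
    // existing table is no longer exposed through getTableLocation.
    TableIdentifier id = TableIdentifier.of("db", "tbl"); // hypothetical identifier
    System.out.println("exists: " + catalog.tableExists(id));
  }
}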
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
index ae7f37fb500..521cbcc2eac 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java
@@ -1880,7 +1880,7 @@ public enum ConfVars {
"HMS Iceberg Catalog servlet path component of URL endpoint."
),
ICEBERG_CATALOG_CACHE_EXPIRY("metastore.iceberg.catalog.cache.expiry",
- "hive.metastore.iceberg.catalog.cache.expiry", 600_000L,
+ "hive.metastore.iceberg.catalog.cache.expiry", -1,
"HMS Iceberg Catalog cache expiry."
),
HTTPSERVER_THREADPOOL_MIN("hive.metastore.httpserver.threadpool.min",
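
The hunk above restores the cache expiry default from 600_000L (10 minutes) back to -1. A short sketch of reading the setting, assuming the long-typed accessor MetastoreConf.getLongVar and treating a non-positive value as "caching disabled" (that interpretation belongs to the factory and is not shown in this diff):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;

public class CacheExpiryProbe {
  public static void main(String[] args) {
    Configuration conf = MetastoreConf.newMetastoreConf();
    // Reads metastore.iceberg.catalog.cache.expiry; the default is -1 again after this revert.
    long expiryMs = MetastoreConf.getLongVar(conf,
        MetastoreConf.ConfVars.ICEBERG_CATALOG_CACHE_EXPIRY);
    boolean cacheEnabled = expiryMs > 0; // assumed interpretation, for illustration
    System.out.println("expiry(ms)=" + expiryMs + ", caching=" + cacheEnabled);
  }
}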
diff --git a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java
index 53d9d60faf4..edb5fbd41a9 100644
--- a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java
+++ b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCachingCatalog.java
@@ -19,20 +19,12 @@
package org.apache.iceberg.rest;
-import com.github.benmanes.caffeine.cache.Cache;
import com.github.benmanes.caffeine.cache.Ticker;
import java.util.List;
import java.util.Map;
import java.util.Set;
-
-import org.apache.iceberg.BaseMetadataTable;
import org.apache.iceberg.CachingCatalog;
-import org.apache.iceberg.HasTableOperations;
-import org.apache.iceberg.MetadataTableType;
-import org.apache.iceberg.MetadataTableUtils;
import org.apache.iceberg.Schema;
-import org.apache.iceberg.Table;
-import org.apache.iceberg.TableOperations;
import org.apache.iceberg.catalog.Catalog;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.catalog.SupportsNamespaces;
@@ -43,22 +35,24 @@
import org.apache.iceberg.hive.HiveCatalog;
import org.apache.iceberg.view.View;
import org.apache.iceberg.view.ViewBuilder;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Class that wraps an Iceberg Catalog to cache tables.
*/
public class HMSCachingCatalog extends CachingCatalog implements SupportsNamespaces, ViewCatalog {
- private static final Logger LOG = LoggerFactory.getLogger(HMSCachingCatalog.class);
private final HiveCatalog hiveCatalog;
public HMSCachingCatalog(HiveCatalog catalog, long expiration) {
- super(catalog, false, expiration, Ticker.systemTicker());
+ super(catalog, true, expiration, Ticker.systemTicker());
this.hiveCatalog = catalog;
}
+ @Override
+ public Catalog.TableBuilder buildTable(TableIdentifier identifier, Schema schema) {
+ return hiveCatalog.buildTable(identifier, schema);
+ }
+
@Override
public void createNamespace(Namespace nmspc, Map<String, String> map) {
hiveCatalog.createNamespace(nmspc, map);
@@ -69,48 +63,6 @@ public List<Namespace> listNamespaces(Namespace nmspc) throws NoSuchNamespaceExc
return hiveCatalog.listNamespaces(nmspc);
}
- @Override
- public Table loadTable(TableIdentifier identifier) {
- final Cache<TableIdentifier, Table> cache = this.tableCache;
- final HiveCatalog catalog = this.hiveCatalog;
- final TableIdentifier canonicalized = identifier.toLowerCase();
- Table cachedTable = cache.getIfPresent(canonicalized);
- if (cachedTable != null) {
- String location = catalog.getTableLocation(canonicalized);
- if (location == null) {
- LOG.debug("Table {} has no location, returning cached table without
location", canonicalized);
- } else if (!location.equals(cachedTable.location())) {
- LOG.debug("Cached table {} has a different location than the one in
the catalog: {} != {}",
- canonicalized, cachedTable.location(), location);
- // Invalidate the cached table if the location is different
- invalidateTable(canonicalized);
- } else {
- LOG.debug("Returning cached table: {}", canonicalized);
- return cachedTable;
- }
- }
- Table table = cache.get(canonicalized, catalog::loadTable);
- if (table instanceof BaseMetadataTable) {
- // Cache underlying table
- TableIdentifier originTableIdentifier =
- TableIdentifier.of(canonicalized.namespace().levels());
- Table originTable = cache.get(originTableIdentifier, catalog::loadTable);
- // Share TableOperations instance of origin table for all metadata tables, so that metadata
- // table instances are refreshed as well when origin table instance is refreshed.
- if (originTable instanceof HasTableOperations) {
- TableOperations ops = ((HasTableOperations) originTable).operations();
- MetadataTableType type = MetadataTableType.from(canonicalized.name());
-
- Table metadataTable =
- MetadataTableUtils.createMetadataTableInstance(
- ops, catalog.name(), originTableIdentifier, canonicalized, type);
- cache.put(canonicalized, metadataTable);
- return metadataTable;
- }
- }
- return table;
- }
-
@Override
public Map<String, String> loadNamespaceMetadata(Namespace nmspc) throws NoSuchNamespaceException {
return hiveCatalog.loadNamespaceMetadata(nmspc);
@@ -140,11 +92,6 @@ public boolean namespaceExists(Namespace namespace) {
return hiveCatalog.namespaceExists(namespace);
}
- @Override
- public Catalog.TableBuilder buildTable(TableIdentifier identifier, Schema schema) {
- return hiveCatalog.buildTable(identifier, schema);
- }
-
@Override
public List<TableIdentifier> listViews(Namespace namespace) {
return hiveCatalog.listViews(namespace);
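
With the loadTable override and its location check gone, HMSCachingCatalog is again a thin CachingCatalog subclass constructed via super(catalog, true, expiration, Ticker.systemTicker()), so cached entries are served until they expire or are explicitly invalidated. A minimal wiring sketch, assuming a configured HiveCatalog and a hypothetical 10-minute expiry (illustrative only, not part of the patch):

import java.util.Collections;
import org.apache.hadoop.conf.Configuration;
import org.apache.iceberg.catalog.Namespace;
import org.apache.iceberg.hive.HiveCatalog;
import org.apache.iceberg.rest.HMSCachingCatalog;

public class CachingCatalogWiring {
  public static void main(String[] args) {
    HiveCatalog hiveCatalog = new HiveCatalog();
    hiveCatalog.setConf(new Configuration());
    hiveCatalog.initialize("hive", Collections.emptyMap());

    // Namespace and view calls still delegate straight to the HiveCatalog;
    // table loads go through the inherited CachingCatalog cache.
    long expiryMillis = 600_000L; // hypothetical expiry for illustration
    HMSCachingCatalog caching = new HMSCachingCatalog(hiveCatalog, expiryMillis);
    System.out.println("default namespace exists: "
        + caching.namespaceExists(Namespace.of("default")));
  }
}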
diff --git a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
index d30fee989de..682e7c9e264 100644
--- a/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
+++ b/standalone-metastore/metastore-rest-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogFactory.java
@@ -33,10 +33,8 @@
/**
* Catalog & servlet factory.
- * <p>This class is derivable on purpose; the factory class name is a configuration property, this class
- * can serve as a base for specialization.</p>
*/
-public final class HMSCatalogFactory {
+public class HMSCatalogFactory {
private static final String SERVLET_ID_KEY = "metastore.in.test.iceberg.catalog.servlet.id";
private final Configuration configuration;
@@ -54,12 +52,12 @@ private HMSCatalogFactory(Configuration conf) {
path = MetastoreConf.getVar(conf, MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PATH);
this.configuration = conf;
}
-
- private int getPort() {
+
+ public int getPort() {
return port;
}
-
- private String getPath() {
+
+ public String getPath() {
return path;
}
@@ -112,8 +110,7 @@ private HttpServlet createServlet(Catalog catalog) {
*/
private HttpServlet createServlet() {
if (port >= 0 && path != null && !path.isEmpty()) {
- Catalog actualCatalog = createCatalog();
- return createServlet(actualCatalog);
+ return createServlet(createCatalog());
}
return null;
}
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index e3d0958c503..180baef67bf 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -670,7 +670,6 @@ private static void constraintHttpMethods(ServletContextHandler ctxHandler, bool
}
ctxHandler.setSecurityHandler(securityHandler);
}
-
/**
* Start Metastore based on a passed {@link HadoopThriftAuthBridge}.
*
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
index 8afc689d010..677e814e8c1 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/ServletSecurity.java
@@ -19,7 +19,6 @@
import com.google.common.base.Preconditions;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.metastore.auth.HttpAuthenticationException;
import org.apache.hadoop.hive.metastore.auth.jwt.JWTValidator;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
@@ -239,15 +238,6 @@ public void execute(HttpServletRequest request, HttpServletResponse response, Me
Thread.currentThread().interrupt();
} catch (RuntimeException e) {
throw new IOException("Exception when executing http request as user: "+
clientUgi.getUserName(), e);
- } finally {
- try {
- FileSystem.closeAllForUGI(clientUgi);
- if (LOG.isDebugEnabled()) {
- LOG.debug("Successfully cleaned up FileSystem handles for user: {}",
clientUgi.getUserName());
- }
- } catch (IOException cleanupException) {
- LOG.error("Failed to clean up FileSystem handles for UGI: {}",
clientUgi, cleanupException);
- }
}
}