ayushtkn commented on code in PR #5606:
URL: https://github.com/apache/hive/pull/5606#discussion_r1948027716


##########
standalone-metastore/metastore-catalog/pom.xml:
##########
@@ -0,0 +1,340 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+      http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+  <parent>
+    <artifactId>hive-standalone-metastore</artifactId>
+    <groupId>org.apache.hive</groupId>
+    <version>4.1.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hive-standalone-metastore-icecat</artifactId>
+  <name>Hive Metastore Iceberg Catalog</name>
+  <properties>
+    <standalone.metastore.path.to.root>..</standalone.metastore.path.to.root>
+    <maven.compiler.source>8</maven.compiler.source>
+    <maven.compiler.target>8</maven.compiler.target>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <log4j2.debug>false</log4j2.debug>
+    <hive.version>${project.parent.version}</hive.version>
+    <revision>${hive.version}</revision>
+    <iceberg.version>1.6.1</iceberg.version>
+  </properties>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-standalone-metastore-server</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-standalone-metastore-common</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-iceberg-shading</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-iceberg-handler</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-iceberg-catalog</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.iceberg</groupId>
+      <artifactId>iceberg-bundled-guava</artifactId>
+      <version>${iceberg.version}</version>
+    </dependency>
+    <!-- Intellij dislikes shaded Iceberg: uncomment the following 2 blocks 
helps when coding/debugging -->
+    <!--
+    <dependency>
+      <groupId>org.apache.iceberg</groupId>
+      <artifactId>iceberg-api</artifactId>
+      <version>${iceberg.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.iceberg</groupId>
+      <artifactId>iceberg-core</artifactId>
+      <version>${iceberg.version}</version>
+      <optional>true</optional>
+    </dependency>
+    -->
+    <dependency>
+      <groupId>org.apache.httpcomponents.core5</groupId>
+      <artifactId>httpcore5</artifactId>
+      <version>5.2</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.github.tomakehurst</groupId>
+      <artifactId>wiremock-jre8-standalone</artifactId>
+      <version>2.32.0</version>

Review Comment:
   Can you define a version property for this dependency (or reuse an existing 
one if the project already defines it), and do the same for the other 
dependencies added in this pom? That makes upgrades easier, and we need to 
ensure the versions of third-party libraries are kept in sync across the 
project.



##########
standalone-metastore/metastore-catalog/pom.xml:
##########
@@ -0,0 +1,340 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+      http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+  <parent>
+    <artifactId>hive-standalone-metastore</artifactId>
+    <groupId>org.apache.hive</groupId>
+    <version>4.1.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hive-standalone-metastore-icecat</artifactId>
+  <name>Hive Metastore Iceberg Catalog</name>
+  <properties>
+    <standalone.metastore.path.to.root>..</standalone.metastore.path.to.root>
+    <maven.compiler.source>8</maven.compiler.source>
+    <maven.compiler.target>8</maven.compiler.target>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <log4j2.debug>false</log4j2.debug>
+    <hive.version>${project.parent.version}</hive.version>
+    <revision>${hive.version}</revision>
+    <iceberg.version>1.6.1</iceberg.version>
+  </properties>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-standalone-metastore-server</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-standalone-metastore-common</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-iceberg-shading</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-iceberg-handler</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-iceberg-catalog</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.iceberg</groupId>
+      <artifactId>iceberg-bundled-guava</artifactId>
+      <version>${iceberg.version}</version>
+    </dependency>
+    <!-- Intellij dislikes shaded Iceberg: uncomment the following 2 blocks 
helps when coding/debugging -->
+    <!--
+    <dependency>
+      <groupId>org.apache.iceberg</groupId>
+      <artifactId>iceberg-api</artifactId>
+      <version>${iceberg.version}</version>
+      <optional>true</optional>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.iceberg</groupId>
+      <artifactId>iceberg-core</artifactId>
+      <version>${iceberg.version}</version>
+      <optional>true</optional>
+    </dependency>
+    -->
+    <dependency>
+      <groupId>org.apache.httpcomponents.core5</groupId>
+      <artifactId>httpcore5</artifactId>
+      <version>5.2</version>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>com.github.tomakehurst</groupId>
+      <artifactId>wiremock-jre8-standalone</artifactId>
+      <version>2.32.0</version>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.assertj</groupId>
+      <artifactId>assertj-core</artifactId>
+      <version>3.19.0</version>

Review Comment:
   The project already defines the assertj-core dependency in other modules; can 
you reuse that, and use a version property instead of directly hardcoding the 
number? If the property isn't accessible from here, consider putting it in some 
common place and using it everywhere — that makes upgrades easier.



##########
standalone-metastore/metastore-catalog/pom.xml:
##########
@@ -0,0 +1,340 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+      http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+  <parent>
+    <artifactId>hive-standalone-metastore</artifactId>
+    <groupId>org.apache.hive</groupId>
+    <version>4.1.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hive-standalone-metastore-icecat</artifactId>
+  <name>Hive Metastore Iceberg Catalog</name>
+  <properties>
+    <standalone.metastore.path.to.root>..</standalone.metastore.path.to.root>
+    <maven.compiler.source>8</maven.compiler.source>
+    <maven.compiler.target>8</maven.compiler.target>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <log4j2.debug>false</log4j2.debug>
+    <hive.version>${project.parent.version}</hive.version>

Review Comment:
   These are all defined in the parent pom; I don't think we need to redefine 
them here — they can be used directly in this module as well.



##########
standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java:
##########
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import java.io.IOException;
+import java.lang.ref.Reference;
+import java.lang.ref.SoftReference;
+import java.util.Collections;
+import java.util.Map;
+import java.util.TreeMap;
+import javax.servlet.http.HttpServlet;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.SecureServletCaller;
+import org.apache.hadoop.hive.metastore.ServletSecurity;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.iceberg.HiveCachingCatalog;
+import org.apache.iceberg.catalog.Catalog;
+import org.apache.iceberg.hive.HiveCatalog;
+import org.eclipse.jetty.server.ConnectionFactory;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.handler.gzip.GzipHandler;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class HMSCatalogServer {
+  private static final String CACHE_EXPIRY = 
"hive.metastore.catalog.cache.expiry";
+  private static final String JETTY_THREADPOOL_MIN = 
"hive.metastore.catalog.jetty.threadpool.min";
+  private static final String JETTY_THREADPOOL_MAX = 
"hive.metastore.catalog.jetty.threadpool.max";
+  private static final String JETTY_THREADPOOL_IDLE = 
"hive.metastore.catalog.jetty.threadpool.idle";
+  private static final Logger LOG = 
LoggerFactory.getLogger(HMSCatalogServer.class);
+  private static Reference<Catalog> catalogRef;
+
+  static Catalog getLastCatalog() {
+    return catalogRef != null ? catalogRef.get() :  null;
+  }
+
+  private HMSCatalogServer() {
+    // nothing
+  }
+
+  public static HttpServlet createServlet(SecureServletCaller security, 
Catalog catalog) throws IOException {
+    return new HMSCatalogServlet(security, new HMSCatalogAdapter(catalog));
+  }
+
+  public static Catalog createCatalog(Configuration configuration) {
+    final String curi = 
configuration.get(MetastoreConf.ConfVars.THRIFT_URIS.getVarname());
+    final String cwarehouse = 
configuration.get(MetastoreConf.ConfVars.WAREHOUSE.getVarname());
+    final String cextwarehouse = 
configuration.get(MetastoreConf.ConfVars.WAREHOUSE_EXTERNAL.getVarname());
+    MetastoreConf.setVar(configuration, MetastoreConf.ConfVars.THRIFT_URIS, 
"");
+    MetastoreConf.setVar(configuration, 
MetastoreConf.ConfVars.HMS_HANDLER_CREATE, "newHMSRetryingLocalHandler");
+    final HiveCatalog catalog = new org.apache.iceberg.hive.HiveCatalog();
+    catalog.setConf(configuration);
+    Map<String, String> properties = new TreeMap<>();
+    if (curi != null) {
+      properties.put("uri", curi);
+    }
+    if (cwarehouse != null) {
+      properties.put("warehouse", cwarehouse);
+    }
+    if (cextwarehouse != null) {
+      properties.put("external-warehouse", cextwarehouse);
+    }
+    catalog.initialize("hive", properties);
+    long expiry = configuration.getLong(CACHE_EXPIRY, 60_000L);
+    return expiry > 0? HiveCachingCatalog.wrap(catalog, expiry) : catalog;
+  }
+
+  public static HttpServlet createServlet(Configuration configuration, Catalog 
catalog) throws IOException {
+    String auth = MetastoreConf.getVar(configuration, 
MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_AUTH);
+    boolean jwt = "jwt".equalsIgnoreCase(auth);
+    SecureServletCaller security = new ServletSecurity(configuration, jwt);
+    Catalog actualCatalog = catalog;
+    if (actualCatalog == null) {
+      actualCatalog = createCatalog(configuration);
+      actualCatalog.initialize("hive", Collections.emptyMap());

Review Comment:
   Can we use ``DEFAULT_CATALOG_NAME`` instead, here + above



##########
standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java:
##########
@@ -0,0 +1,425 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.MetricRegistry;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.ok;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.ToNumberPolicy;
+import com.google.gson.ToNumberStrategy;
+import com.nimbusds.jose.JWSAlgorithm;
+import com.nimbusds.jose.JWSHeader;
+import com.nimbusds.jose.JWSSigner;
+import com.nimbusds.jose.crypto.RSASSASigner;
+import com.nimbusds.jose.jwk.RSAKey;
+import com.nimbusds.jwt.JWTClaimsSet;
+import com.nimbusds.jwt.SignedJWT;
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaException;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfo;
+import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.metrics.Metrics;
+import org.apache.hadoop.hive.metastore.properties.HMSPropertyManager;
+import org.apache.hadoop.hive.metastore.properties.PropertyManager;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+import org.apache.iceberg.catalog.Catalog;
+import org.apache.iceberg.catalog.SupportsNamespaces;
+import org.eclipse.jetty.server.Server;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.rules.TemporaryFolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class HMSTestBase {
+  protected static final Logger LOG = 
LoggerFactory.getLogger(HMSTestBase.class.getName());
+  protected static final String BASE_DIR = System.getProperty("basedir");
+  protected static Random RND = new Random(20230922);
+  protected static final String USER_1 = "USER_1";
+  protected static final String DB_NAME = "hivedb";
+
+  protected static final long EVICTION_INTERVAL = 
TimeUnit.SECONDS.toMillis(10);

Review Comment:
   unused variable; we should remove it



##########
standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java:
##########
@@ -0,0 +1,425 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.MetricRegistry;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.ok;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.ToNumberPolicy;
+import com.google.gson.ToNumberStrategy;
+import com.nimbusds.jose.JWSAlgorithm;
+import com.nimbusds.jose.JWSHeader;
+import com.nimbusds.jose.JWSSigner;
+import com.nimbusds.jose.crypto.RSASSASigner;
+import com.nimbusds.jose.jwk.RSAKey;
+import com.nimbusds.jwt.JWTClaimsSet;
+import com.nimbusds.jwt.SignedJWT;
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaException;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfo;
+import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.metrics.Metrics;
+import org.apache.hadoop.hive.metastore.properties.HMSPropertyManager;
+import org.apache.hadoop.hive.metastore.properties.PropertyManager;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+import org.apache.iceberg.catalog.Catalog;
+import org.apache.iceberg.catalog.SupportsNamespaces;
+import org.eclipse.jetty.server.Server;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.rules.TemporaryFolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class HMSTestBase {
+  protected static final Logger LOG = 
LoggerFactory.getLogger(HMSTestBase.class.getName());
+  protected static final String BASE_DIR = System.getProperty("basedir");
+  protected static Random RND = new Random(20230922);
+  protected static final String USER_1 = "USER_1";
+  protected static final String DB_NAME = "hivedb";
+
+  protected static final long EVICTION_INTERVAL = 
TimeUnit.SECONDS.toMillis(10);
+  private static final File JWT_AUTHKEY_FILE =
+      new File(BASE_DIR,"src/test/resources/auth/jwt/jwt-authorized-key.json");
+  protected static final File JWT_NOAUTHKEY_FILE =

Review Comment:
   unused



##########
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java:
##########
@@ -732,10 +766,26 @@ public static void startMetaStore(int port, 
HadoopThriftAuthBridge bridge,
     }
     // optionally create and start the property server and servlet
     propertyServer = PropertyServlet.startServer(conf);
+    // optionally create and start the Iceberg REST server and servlet
+    icebergServer = startIcebergCatalog(conf);
 
     thriftServer.start();
   }
 
+  static Server startIcebergCatalog(Configuration configuration) {
+    try {
+      Class<?> iceClazz = 
Class.forName("org.apache.iceberg.rest.HMSCatalogServer");
+      Method iceStart = iceClazz.getMethod("startServer", Configuration.class);
+      return (Server) iceStart.invoke(null, configuration);
+    } catch (ClassNotFoundException xnf) {
+      LOG.warn("unable to start Iceberg REST Catalog server {}, missing jar?", 
xnf);
+      return null;
+    } catch (NoSuchMethodException | IllegalAccessException | 
InvocationTargetException e) {

Review Comment:
   Why not just catch `Exception` here? Is there any case where you think the 
exception should propagate and crash the metastore?



##########
standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStoreClient.java:
##########
@@ -282,7 +282,7 @@ public Void run() throws Exception {
     open();
   }
 
-  /**
+    /**
    * Instantiate the metastore server handler directly instead of connecting

Review Comment:
   nit, it was correct earlier



##########
standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java:
##########
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import java.io.IOException;
+import java.lang.ref.Reference;
+import java.lang.ref.SoftReference;
+import java.util.Collections;
+import java.util.Map;
+import java.util.TreeMap;
+import javax.servlet.http.HttpServlet;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.SecureServletCaller;
+import org.apache.hadoop.hive.metastore.ServletSecurity;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.iceberg.HiveCachingCatalog;
+import org.apache.iceberg.catalog.Catalog;
+import org.apache.iceberg.hive.HiveCatalog;
+import org.eclipse.jetty.server.ConnectionFactory;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.handler.gzip.GzipHandler;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class HMSCatalogServer {
+  private static final String CACHE_EXPIRY = 
"hive.metastore.catalog.cache.expiry";
+  private static final String JETTY_THREADPOOL_MIN = 
"hive.metastore.catalog.jetty.threadpool.min";
+  private static final String JETTY_THREADPOOL_MAX = 
"hive.metastore.catalog.jetty.threadpool.max";
+  private static final String JETTY_THREADPOOL_IDLE = 
"hive.metastore.catalog.jetty.threadpool.idle";
+  private static final Logger LOG = 
LoggerFactory.getLogger(HMSCatalogServer.class);
+  private static Reference<Catalog> catalogRef;
+
+  static Catalog getLastCatalog() {
+    return catalogRef != null ? catalogRef.get() :  null;
+  }
+
+  private HMSCatalogServer() {
+    // nothing
+  }
+
+  public static HttpServlet createServlet(SecureServletCaller security, 
Catalog catalog) throws IOException {

Review Comment:
   this method doesn't throw IOException, so the `throws IOException` clause can be removed



##########
standalone-metastore/metastore-catalog/pom.xml:
##########
@@ -0,0 +1,340 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+      http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+  <parent>
+    <artifactId>hive-standalone-metastore</artifactId>
+    <groupId>org.apache.hive</groupId>
+    <version>4.1.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hive-standalone-metastore-icecat</artifactId>
+  <name>Hive Metastore Iceberg Catalog</name>
+  <properties>
+    <standalone.metastore.path.to.root>..</standalone.metastore.path.to.root>
+    <maven.compiler.source>8</maven.compiler.source>
+    <maven.compiler.target>8</maven.compiler.target>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <log4j2.debug>false</log4j2.debug>
+    <hive.version>${project.parent.version}</hive.version>
+    <revision>${hive.version}</revision>
+    <iceberg.version>1.6.1</iceberg.version>
+  </properties>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-standalone-metastore-server</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-standalone-metastore-common</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-iceberg-shading</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-iceberg-handler</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-iceberg-catalog</artifactId>
+      <version>${revision}</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.iceberg</groupId>
+      <artifactId>iceberg-bundled-guava</artifactId>
+      <version>${iceberg.version}</version>
+    </dependency>
+    <!-- Intellij dislikes shaded Iceberg: uncomment the following 2 blocks 
helps when coding/debugging -->

Review Comment:
   Maybe we can remove this — there is a way to work around the IntelliJ issue: 
after building, add the iceberg-shading jar as a dependency of the module via 
IntelliJ's module options (or something similar).



##########
standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/HiveCachingCatalog.java:
##########
@@ -0,0 +1,331 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg;
+
+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
+import com.github.benmanes.caffeine.cache.RemovalCause;
+import com.github.benmanes.caffeine.cache.RemovalListener;
+import com.github.benmanes.caffeine.cache.Ticker;
+import java.time.Duration;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.atomic.AtomicBoolean;
+import org.apache.iceberg.catalog.Catalog;
+import org.apache.iceberg.catalog.Namespace;
+import org.apache.iceberg.catalog.SupportsNamespaces;
+import org.apache.iceberg.catalog.TableIdentifier;
+import org.apache.iceberg.exceptions.AlreadyExistsException;
+import org.apache.iceberg.exceptions.NamespaceNotEmptyException;
+import org.apache.iceberg.exceptions.NoSuchNamespaceException;
+import org.apache.iceberg.relocated.com.google.common.base.Preconditions;
+import org.apache.iceberg.relocated.com.google.common.collect.ImmutableList;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+
+/**
+ * Class that wraps an Iceberg Catalog to cache tables.
+ * Initial code in:
+ * 
https://github.com/apache/iceberg/blob/1.3.x/core/src/main/java/org/apache/iceberg/CachingCatalog.java
+ * Main difference is the SupportsNamespace and the fact that loadTable 
performs a metadata refresh.
+ *
+ * <p>See {@link CatalogProperties#CACHE_EXPIRATION_INTERVAL_MS} for more 
details regarding special
+ * values for {@code expirationIntervalMillis}.
+ */
+public class HiveCachingCatalog<CATALOG extends Catalog & SupportsNamespaces> 
implements Catalog, SupportsNamespaces {

Review Comment:
   We should extend ``CachingCatalog`` and implement the methods modified + the 
new methods in ``SupportsNamespaces`` rather than copying everything to hive



##########
standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java:
##########
@@ -0,0 +1,288 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.io.UncheckedIOException;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Consumer;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.hadoop.hive.metastore.SecureServletCaller;
+import org.apache.hc.core5.http.ContentType;
+import org.apache.hc.core5.http.HttpHeaders;
+import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
+import org.apache.iceberg.relocated.com.google.common.io.CharStreams;
+import org.apache.iceberg.rest.HMSCatalogAdapter.HTTPMethod;
+import org.apache.iceberg.rest.HMSCatalogAdapter.Route;
+import org.apache.iceberg.rest.responses.ErrorResponse;
+import org.apache.iceberg.util.Pair;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Original @ 
https://github.com/apache/iceberg/blob/main/core/src/test/java/org/apache/iceberg/rest/RESTCatalogServlet.java
+ * The RESTCatalogServlet provides a servlet implementation used in 
combination with a
+ * RESTCatalogAdaptor to proxy the REST Spec to any Catalog implementation.
+ */
+public class HMSCatalogServlet extends HttpServlet {
+  private static final Logger LOG = 
LoggerFactory.getLogger(HMSCatalogServlet.class);
+  /**
+   * The security.
+   */
+  private final SecureServletCaller security;
+
+  private final HMSCatalogAdapter restCatalogAdapter;
+  private final Map<String, String> responseHeaders =
+      ImmutableMap.of(HttpHeaders.CONTENT_TYPE, 
ContentType.APPLICATION_JSON.getMimeType());
+
+  public HMSCatalogServlet(SecureServletCaller security, HMSCatalogAdapter 
restCatalogAdapter) {
+    this.security = security;
+    this.restCatalogAdapter = restCatalogAdapter;
+  }
+
+  @Override
+  public void init() throws ServletException {
+    super.init();
+    security.init();
+  }
+  @Override
+  protected void service(HttpServletRequest req, HttpServletResponse resp) 
throws ServletException, IOException {
+    String method = req.getMethod();
+    if (!"PATCH".equals(method)) {
+      super.service(req, resp);
+    } else {
+      this.doPatch(req, resp);
+    }
+  }
+
+  protected void doPatch(HttpServletRequest request, HttpServletResponse 
response)  {
+    try {
+      security.execute(request, response, this::execute);
+    } catch (IOException e) {
+      LOG.error("PATCH failed", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  @Override
+  protected void doGet(HttpServletRequest request, HttpServletResponse 
response) {
+    try {
+      security.execute(request, response, this::execute);
+    } catch (IOException e) {
+      LOG.error("GET failed", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  @Override
+  protected void doPut(HttpServletRequest request, HttpServletResponse 
response) {
+    try {
+      security.execute(request, response, this::execute);
+    } catch (IOException e) {
+      LOG.error("PUT failed", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  @Override
+  protected void doHead(HttpServletRequest request, HttpServletResponse 
response) {
+    try {
+      security.execute(request, response, this::execute);
+    } catch (IOException e) {
+      LOG.error("HEAD failed", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  @Override
+  protected void doPost(HttpServletRequest request, HttpServletResponse 
response) {
+    try {
+      security.execute(request, response, this::execute);
+    } catch (IOException e) {
+      LOG.error("POST failed", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  @Override
+  protected void doDelete(HttpServletRequest request, HttpServletResponse 
response)  {
+    try {
+      security.execute(request, response, this::execute);
+    } catch (IOException e) {
+      LOG.error("DELETE failed", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  private void execute(HttpServletRequest request, HttpServletResponse 
response) {
+    try {
+      ServletRequestContext context = ServletRequestContext.from(request);
+      response.setStatus(HttpServletResponse.SC_OK);
+      responseHeaders.forEach(response::setHeader);
+
+      final Optional<ErrorResponse> error = context.error();
+      if (error.isPresent()) {
+        response.setStatus(HttpServletResponse.SC_BAD_REQUEST);
+        RESTObjectMapper.mapper().writeValue(response.getWriter(), 
error.get());
+        return;
+      }
+      Object responseBody =
+          restCatalogAdapter.execute(
+              context.method(),
+              context.path(),
+              context.queryParams(),
+              context.body(),
+              context.route().responseClass(),
+              context.headers(),
+              handle(response));
+
+      if (responseBody != null) {
+        RESTObjectMapper.mapper().writeValue(response.getWriter(), 
responseBody);
+      }
+    } catch (RuntimeException e) {
+      // should be a RESTException but not able to see them through 
dependencies
+      LOG.error("Error processing REST request", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    } catch (Exception e) {
+      LOG.error("Unexpected exception when processing REST request", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);

Review Comment:
   There doesn't seem to be much difference between the two exception blocks apart 
from the log message; maybe you can manage with a single catch block. Otherwise, I 
can live with it :-) 



##########
standalone-metastore/metastore-catalog/pom.xml:
##########
@@ -0,0 +1,340 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+      http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+  <parent>
+    <artifactId>hive-standalone-metastore</artifactId>
+    <groupId>org.apache.hive</groupId>
+    <version>4.1.0-SNAPSHOT</version>
+  </parent>
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>hive-standalone-metastore-icecat</artifactId>
+  <name>Hive Metastore Iceberg Catalog</name>
+  <properties>
+    <standalone.metastore.path.to.root>..</standalone.metastore.path.to.root>
+    <maven.compiler.source>8</maven.compiler.source>
+    <maven.compiler.target>8</maven.compiler.target>
+    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+    <log4j2.debug>false</log4j2.debug>
+    <hive.version>${project.parent.version}</hive.version>
+    <revision>${hive.version}</revision>

Review Comment:
   There is no need for `revision`; we can use `hive.version` directly. Also, we 
need not define `hive.version` here — it is already defined in the parent pom, so 
we can use it directly.



##########
standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java:
##########
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import java.io.IOException;
+import java.lang.ref.Reference;
+import java.lang.ref.SoftReference;
+import java.util.Collections;
+import java.util.Map;
+import java.util.TreeMap;
+import javax.servlet.http.HttpServlet;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.SecureServletCaller;
+import org.apache.hadoop.hive.metastore.ServletSecurity;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.iceberg.HiveCachingCatalog;
+import org.apache.iceberg.catalog.Catalog;
+import org.apache.iceberg.hive.HiveCatalog;
+import org.eclipse.jetty.server.ConnectionFactory;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.handler.gzip.GzipHandler;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class HMSCatalogServer {
+  private static final String CACHE_EXPIRY = 
"hive.metastore.catalog.cache.expiry";
+  private static final String JETTY_THREADPOOL_MIN = 
"hive.metastore.catalog.jetty.threadpool.min";
+  private static final String JETTY_THREADPOOL_MAX = 
"hive.metastore.catalog.jetty.threadpool.max";
+  private static final String JETTY_THREADPOOL_IDLE = 
"hive.metastore.catalog.jetty.threadpool.idle";
+  private static final Logger LOG = 
LoggerFactory.getLogger(HMSCatalogServer.class);
+  private static Reference<Catalog> catalogRef;
+
+  static Catalog getLastCatalog() {
+    return catalogRef != null ? catalogRef.get() :  null;
+  }
+
+  private HMSCatalogServer() {
+    // nothing
+  }
+
+  public static HttpServlet createServlet(SecureServletCaller security, 
Catalog catalog) throws IOException {
+    return new HMSCatalogServlet(security, new HMSCatalogAdapter(catalog));
+  }
+
+  public static Catalog createCatalog(Configuration configuration) {
+    final String curi = 
configuration.get(MetastoreConf.ConfVars.THRIFT_URIS.getVarname());
+    final String cwarehouse = 
configuration.get(MetastoreConf.ConfVars.WAREHOUSE.getVarname());
+    final String cextwarehouse = 
configuration.get(MetastoreConf.ConfVars.WAREHOUSE_EXTERNAL.getVarname());

Review Comment:
   I am not able to decode what the prefix `c` means here. Can you maybe use some 
more readable variable names?



##########
standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java:
##########
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import java.io.IOException;
+import java.lang.ref.Reference;
+import java.lang.ref.SoftReference;
+import java.util.Collections;
+import java.util.Map;
+import java.util.TreeMap;
+import javax.servlet.http.HttpServlet;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.SecureServletCaller;
+import org.apache.hadoop.hive.metastore.ServletSecurity;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.iceberg.HiveCachingCatalog;
+import org.apache.iceberg.catalog.Catalog;
+import org.apache.iceberg.hive.HiveCatalog;
+import org.eclipse.jetty.server.ConnectionFactory;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.handler.gzip.GzipHandler;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class HMSCatalogServer {
+  private static final String CACHE_EXPIRY = 
"hive.metastore.catalog.cache.expiry";
+  private static final String JETTY_THREADPOOL_MIN = 
"hive.metastore.catalog.jetty.threadpool.min";
+  private static final String JETTY_THREADPOOL_MAX = 
"hive.metastore.catalog.jetty.threadpool.max";
+  private static final String JETTY_THREADPOOL_IDLE = 
"hive.metastore.catalog.jetty.threadpool.idle";

Review Comment:
   Why don't we define these in the `MetastoreConf`?



##########
standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java:
##########
@@ -0,0 +1,425 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.MetricRegistry;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.ok;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.ToNumberPolicy;
+import com.google.gson.ToNumberStrategy;
+import com.nimbusds.jose.JWSAlgorithm;
+import com.nimbusds.jose.JWSHeader;
+import com.nimbusds.jose.JWSSigner;
+import com.nimbusds.jose.crypto.RSASSASigner;
+import com.nimbusds.jose.jwk.RSAKey;
+import com.nimbusds.jwt.JWTClaimsSet;
+import com.nimbusds.jwt.SignedJWT;
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaException;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfo;
+import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.metrics.Metrics;
+import org.apache.hadoop.hive.metastore.properties.HMSPropertyManager;
+import org.apache.hadoop.hive.metastore.properties.PropertyManager;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+import org.apache.iceberg.catalog.Catalog;
+import org.apache.iceberg.catalog.SupportsNamespaces;
+import org.eclipse.jetty.server.Server;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.rules.TemporaryFolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class HMSTestBase {
+  protected static final Logger LOG = 
LoggerFactory.getLogger(HMSTestBase.class.getName());
+  protected static final String BASE_DIR = System.getProperty("basedir");
+  protected static Random RND = new Random(20230922);
+  protected static final String USER_1 = "USER_1";
+  protected static final String DB_NAME = "hivedb";
+
+  protected static final long EVICTION_INTERVAL = 
TimeUnit.SECONDS.toMillis(10);
+  private static final File JWT_AUTHKEY_FILE =
+      new File(BASE_DIR,"src/test/resources/auth/jwt/jwt-authorized-key.json");
+  protected static final File JWT_NOAUTHKEY_FILE =
+      new 
File(BASE_DIR,"src/test/resources/auth/jwt/jwt-unauthorized-key.json");
+  protected static final File JWT_JWKS_FILE =
+      new 
File(BASE_DIR,"src/test/resources/auth/jwt/jwt-verification-jwks.json");
+  protected static final int MOCK_JWKS_SERVER_PORT = 8089;
+  @ClassRule
+  public static final WireMockRule MOCK_JWKS_SERVER = new 
WireMockRule(MOCK_JWKS_SERVER_PORT);
+
+
+  public static class TestSchemaInfo extends MetaStoreSchemaInfo {
+    public TestSchemaInfo(String metastoreHome, String dbType) throws 
HiveMetaException {
+      super(metastoreHome, dbType);
+    }
+    @Override
+    public String getMetaStoreScriptDir() {
+      return  new File(BASE_DIR,"src/test/resources").getAbsolutePath() + 
File.separatorChar +
+          "scripts" + File.separatorChar + "metastore" +
+          File.separatorChar + "upgrade" + File.separatorChar + dbType;
+    }
+  }
+
+  @Rule
+  public TemporaryFolder temp = new TemporaryFolder();
+
+  protected Configuration conf = null;
+  protected String NS = "hms" + RND.nextInt(100);
+
+  protected int port = -1;
+  protected int catalogPort = -1;
+  protected final String catalogPath = "hmscatalog";
+  protected static final int WAIT_FOR_SERVER = 5000;
+  // for direct calls
+  protected Catalog catalog;
+  protected SupportsNamespaces nsCatalog;
+
+  protected int createMetastoreServer(Configuration conf) throws Exception {
+    return 
MetaStoreTestUtils.startMetaStoreWithRetry(HadoopThriftAuthBridge.getBridge(), 
conf);
+  }
+
+  protected void stopMetastoreServer(int port) {
+    MetaStoreTestUtils.close(port);
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    NS = "hms" + RND.nextInt(100);
+    conf = MetastoreConf.newMetastoreConf();
+    MetaStoreTestUtils.setConfForStandloneMode(conf);
+    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.CAPABILITY_CHECK, 
false);
+    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST, true);
+    // new 2024-10-02

Review Comment:
   Not sure what this is — maybe some leftover? Can you clean it up?



##########
standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServer.java:
##########
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import java.io.IOException;
+import java.lang.ref.Reference;
+import java.lang.ref.SoftReference;
+import java.util.Collections;
+import java.util.Map;
+import java.util.TreeMap;
+import javax.servlet.http.HttpServlet;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.SecureServletCaller;
+import org.apache.hadoop.hive.metastore.ServletSecurity;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.iceberg.HiveCachingCatalog;
+import org.apache.iceberg.catalog.Catalog;
+import org.apache.iceberg.hive.HiveCatalog;
+import org.eclipse.jetty.server.ConnectionFactory;
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.HttpConfiguration;
+import org.eclipse.jetty.server.HttpConnectionFactory;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.ServerConnector;
+import org.eclipse.jetty.server.handler.gzip.GzipHandler;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+import org.eclipse.jetty.util.ssl.SslContextFactory;
+import org.eclipse.jetty.util.thread.QueuedThreadPool;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+
+public class HMSCatalogServer {
+  private static final String CACHE_EXPIRY = 
"hive.metastore.catalog.cache.expiry";
+  private static final String JETTY_THREADPOOL_MIN = 
"hive.metastore.catalog.jetty.threadpool.min";
+  private static final String JETTY_THREADPOOL_MAX = 
"hive.metastore.catalog.jetty.threadpool.max";
+  private static final String JETTY_THREADPOOL_IDLE = 
"hive.metastore.catalog.jetty.threadpool.idle";
+  private static final Logger LOG = 
LoggerFactory.getLogger(HMSCatalogServer.class);
+  private static Reference<Catalog> catalogRef;
+
+  static Catalog getLastCatalog() {
+    return catalogRef != null ? catalogRef.get() :  null;
+  }
+
+  private HMSCatalogServer() {
+    // nothing
+  }
+
+  public static HttpServlet createServlet(SecureServletCaller security, 
Catalog catalog) throws IOException {
+    return new HMSCatalogServlet(security, new HMSCatalogAdapter(catalog));
+  }
+
+  public static Catalog createCatalog(Configuration configuration) {
+    final String curi = 
configuration.get(MetastoreConf.ConfVars.THRIFT_URIS.getVarname());
+    final String cwarehouse = 
configuration.get(MetastoreConf.ConfVars.WAREHOUSE.getVarname());
+    final String cextwarehouse = 
configuration.get(MetastoreConf.ConfVars.WAREHOUSE_EXTERNAL.getVarname());
+    MetastoreConf.setVar(configuration, MetastoreConf.ConfVars.THRIFT_URIS, 
"");

Review Comment:
   Curious — why do we unset this?



##########
standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java:
##########
@@ -0,0 +1,425 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.MetricRegistry;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.ok;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.ToNumberPolicy;
+import com.google.gson.ToNumberStrategy;
+import com.nimbusds.jose.JWSAlgorithm;
+import com.nimbusds.jose.JWSHeader;
+import com.nimbusds.jose.JWSSigner;
+import com.nimbusds.jose.crypto.RSASSASigner;
+import com.nimbusds.jose.jwk.RSAKey;
+import com.nimbusds.jwt.JWTClaimsSet;
+import com.nimbusds.jwt.SignedJWT;
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaException;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfo;
+import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.metrics.Metrics;
+import org.apache.hadoop.hive.metastore.properties.HMSPropertyManager;
+import org.apache.hadoop.hive.metastore.properties.PropertyManager;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+import org.apache.iceberg.catalog.Catalog;
+import org.apache.iceberg.catalog.SupportsNamespaces;
+import org.eclipse.jetty.server.Server;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.rules.TemporaryFolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class HMSTestBase {
+  protected static final Logger LOG = 
LoggerFactory.getLogger(HMSTestBase.class.getName());
+  protected static final String BASE_DIR = System.getProperty("basedir");
+  protected static Random RND = new Random(20230922);
+  protected static final String USER_1 = "USER_1";
+  protected static final String DB_NAME = "hivedb";
+
+  protected static final long EVICTION_INTERVAL = 
TimeUnit.SECONDS.toMillis(10);
+  private static final File JWT_AUTHKEY_FILE =
+      new File(BASE_DIR,"src/test/resources/auth/jwt/jwt-authorized-key.json");
+  protected static final File JWT_NOAUTHKEY_FILE =
+      new 
File(BASE_DIR,"src/test/resources/auth/jwt/jwt-unauthorized-key.json");
+  protected static final File JWT_JWKS_FILE =
+      new 
File(BASE_DIR,"src/test/resources/auth/jwt/jwt-verification-jwks.json");
+  protected static final int MOCK_JWKS_SERVER_PORT = 8089;
+  @ClassRule
+  public static final WireMockRule MOCK_JWKS_SERVER = new 
WireMockRule(MOCK_JWKS_SERVER_PORT);
+
+
+  public static class TestSchemaInfo extends MetaStoreSchemaInfo {
+    public TestSchemaInfo(String metastoreHome, String dbType) throws 
HiveMetaException {
+      super(metastoreHome, dbType);
+    }
+    @Override
+    public String getMetaStoreScriptDir() {
+      return  new File(BASE_DIR,"src/test/resources").getAbsolutePath() + 
File.separatorChar +
+          "scripts" + File.separatorChar + "metastore" +
+          File.separatorChar + "upgrade" + File.separatorChar + dbType;
+    }
+  }
+
+  @Rule
+  public TemporaryFolder temp = new TemporaryFolder();
+
+  protected Configuration conf = null;
+  protected String NS = "hms" + RND.nextInt(100);
+
+  protected int port = -1;
+  protected int catalogPort = -1;
+  protected final String catalogPath = "hmscatalog";
+  protected static final int WAIT_FOR_SERVER = 5000;
+  // for direct calls
+  protected Catalog catalog;
+  protected SupportsNamespaces nsCatalog;
+
+  protected int createMetastoreServer(Configuration conf) throws Exception {
+    return 
MetaStoreTestUtils.startMetaStoreWithRetry(HadoopThriftAuthBridge.getBridge(), 
conf);
+  }
+
+  protected void stopMetastoreServer(int port) {
+    MetaStoreTestUtils.close(port);
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    NS = "hms" + RND.nextInt(100);
+    conf = MetastoreConf.newMetastoreConf();
+    MetaStoreTestUtils.setConfForStandloneMode(conf);
+    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.CAPABILITY_CHECK, 
false);
+    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST, true);
+    // new 2024-10-02
+    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.SCHEMA_VERIFICATION, 
false);
+
+    conf.setBoolean(MetastoreConf.ConfVars.METRICS_ENABLED.getVarname(), true);
+    // "hive.metastore.warehouse.dir"
+    String whpath = new 
File(BASE_DIR,"target/tmp/warehouse/managed").toURI()/*.getAbsolutePath()*/.toString();
+    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.WAREHOUSE, whpath);
+    HiveConf.setVar(conf, HiveConf.ConfVars.METASTORE_WAREHOUSE, whpath);
+    // "hive.metastore.warehouse.external.dir"
+    String extwhpath = new 
File(BASE_DIR,"target/tmp/warehouse/external").toURI()/*.getAbsolutePath()*/.toString();

Review Comment:
   Can you check the commented-out code and clean it up if it isn't needed?



##########
standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java:
##########
@@ -0,0 +1,288 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.io.UncheckedIOException;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Consumer;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.hadoop.hive.metastore.SecureServletCaller;
+import org.apache.hc.core5.http.ContentType;
+import org.apache.hc.core5.http.HttpHeaders;
+import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
+import org.apache.iceberg.relocated.com.google.common.io.CharStreams;
+import org.apache.iceberg.rest.HMSCatalogAdapter.HTTPMethod;
+import org.apache.iceberg.rest.HMSCatalogAdapter.Route;
+import org.apache.iceberg.rest.responses.ErrorResponse;
+import org.apache.iceberg.util.Pair;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Original @ 
https://github.com/apache/iceberg/blob/main/core/src/test/java/org/apache/iceberg/rest/RESTCatalogServlet.java
+ * The RESTCatalogServlet provides a servlet implementation used in 
combination with a
+ * RESTCatalogAdaptor to proxy the REST Spec to any Catalog implementation.
+ */
+public class HMSCatalogServlet extends HttpServlet {
+  private static final Logger LOG = 
LoggerFactory.getLogger(HMSCatalogServlet.class);
+  /**
+   * The security.
+   */
+  private final SecureServletCaller security;
+
+  private final HMSCatalogAdapter restCatalogAdapter;
+  private final Map<String, String> responseHeaders =
+      ImmutableMap.of(HttpHeaders.CONTENT_TYPE, 
ContentType.APPLICATION_JSON.getMimeType());
+
+  public HMSCatalogServlet(SecureServletCaller security, HMSCatalogAdapter 
restCatalogAdapter) {
+    this.security = security;
+    this.restCatalogAdapter = restCatalogAdapter;
+  }
+
+  @Override
+  public void init() throws ServletException {
+    super.init();
+    security.init();
+  }
+  @Override
+  protected void service(HttpServletRequest req, HttpServletResponse resp) 
throws ServletException, IOException {
+    String method = req.getMethod();
+    if (!"PATCH".equals(method)) {
+      super.service(req, resp);
+    } else {
+      this.doPatch(req, resp);
+    }
+  }
+
+  protected void doPatch(HttpServletRequest request, HttpServletResponse 
response)  {
+    try {
+      security.execute(request, response, this::execute);
+    } catch (IOException e) {
+      LOG.error("PATCH failed", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  @Override
+  protected void doGet(HttpServletRequest request, HttpServletResponse 
response) {
+    try {
+      security.execute(request, response, this::execute);
+    } catch (IOException e) {
+      LOG.error("GET failed", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  @Override
+  protected void doPut(HttpServletRequest request, HttpServletResponse 
response) {
+    try {
+      security.execute(request, response, this::execute);
+    } catch (IOException e) {
+      LOG.error("PUT failed", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  @Override
+  protected void doHead(HttpServletRequest request, HttpServletResponse 
response) {
+    try {
+      security.execute(request, response, this::execute);
+    } catch (IOException e) {
+      LOG.error("HEAD failed", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  @Override
+  protected void doPost(HttpServletRequest request, HttpServletResponse 
response) {
+    try {
+      security.execute(request, response, this::execute);
+    } catch (IOException e) {
+      LOG.error("POST failed", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    }
+  }
+
+  @Override
+  protected void doDelete(HttpServletRequest request, HttpServletResponse 
response)  {
+    try {
+      security.execute(request, response, this::execute);
+    } catch (IOException e) {
+      LOG.error("DELETE failed", e);
+      response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
+    }

Review Comment:
   Can you refactor this, maybe add a method like executeWithSecurity or 
something like this
   ```
     private void executeWithSecurity(HttpServletRequest request, 
HttpServletResponse response, String method) {
       try {
         security.execute(request, response, this::execute);
       } catch (IOException e) {
         LOG.error(method + " failed", e);
         response.setStatus(HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
       }
   ```
   and then just invoke this from all the methods rather than doing the 
redundant stuff



##########
standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogServlet.java:
##########
@@ -0,0 +1,288 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.Reader;
+import java.io.UncheckedIOException;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Optional;
+import java.util.function.Consumer;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.hadoop.hive.metastore.SecureServletCaller;
+import org.apache.hc.core5.http.ContentType;
+import org.apache.hc.core5.http.HttpHeaders;
+import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
+import org.apache.iceberg.relocated.com.google.common.io.CharStreams;
+import org.apache.iceberg.rest.HMSCatalogAdapter.HTTPMethod;
+import org.apache.iceberg.rest.HMSCatalogAdapter.Route;
+import org.apache.iceberg.rest.responses.ErrorResponse;
+import org.apache.iceberg.util.Pair;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Original @ 
https://github.com/apache/iceberg/blob/main/core/src/test/java/org/apache/iceberg/rest/RESTCatalogServlet.java
+ * The RESTCatalogServlet provides a servlet implementation used in 
combination with a
+ * RESTCatalogAdaptor to proxy the REST Spec to any Catalog implementation.
+ */
+public class HMSCatalogServlet extends HttpServlet {

Review Comment:
   It would be better if we add a dependency to the jar and make this class 
extend ``RESTCatalogServlet`` and just implement the modified methods + the new 
ones rather than copying the entire code here



##########
standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/hive/HiveUtil.java:
##########
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.iceberg.hive;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.iceberg.ClientPool;
+import org.apache.iceberg.Snapshot;
+import org.apache.iceberg.TableMetadata;
+import org.apache.iceberg.catalog.Namespace;
+import org.apache.iceberg.hive.HiveCatalog;
+import org.apache.iceberg.hive.HiveTableOperations;
+import org.apache.iceberg.io.FileIO;
+
+import java.util.Map;
+
+/**
+ * A Friend bridge to Iceberg.
+ */
+public class HiveUtil {
+
+  public static final HiveTableOperations newTableOperations(Configuration 
conf, String catalogName, String database, String table) {
+    return new HiveTableOperations(conf, null, null, catalogName, database, 
table);
+  }
+
+  public static final HiveTableOperations newTableOperations(Configuration 
conf, ClientPool metaClients, FileIO fileIO, String catalogName, String 
database, String table) {
+    return new HiveTableOperations(conf, null, null, catalogName, database, 
table);
+  }
+
+  public static Database convertToDatabase(HiveCatalog catalog, Namespace ns, 
Map<String, String> meta) {
+    return catalog.convertToDatabase(ns, meta);
+  }
+
+  public static void setSnapshotSummary(HiveTableOperations ops, Map<String, 
String> parameters, Snapshot snapshot) {
+    ops.setSnapshotSummary(parameters, snapshot);
+  }
+
+  public static void setSnapshotStats(HiveTableOperations ops, TableMetadata 
metadata, Map<String, String> parameters) {
+    ops.setSnapshotStats(metadata, parameters);
+  }
+
+  public static void setSchema(HiveTableOperations ops, TableMetadata 
metadata, Map<String, String> parameters) {
+    ops.setSchema(metadata.schema(), parameters);
+  }
+
+  public static void setPartitionSpec(HiveTableOperations ops, TableMetadata 
metadata, Map<String, String> parameters) {
+    ops.setPartitionSpec(metadata, parameters);
+  }
+
+  public static void setSortOrder(HiveTableOperations ops, TableMetadata 
metadata, Map<String, String> parameters) {
+    ops.setSortOrder(metadata, parameters);
+  }
+}

Review Comment:
   I think this class isn't used anywhere, we can drop it



##########
standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java:
##########
@@ -0,0 +1,158 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import com.google.gson.Gson;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.Table;
+import org.apache.iceberg.Transaction;
+import org.apache.iceberg.catalog.Namespace;
+import org.apache.iceberg.catalog.TableIdentifier;
+import org.apache.iceberg.types.Types;
+import static org.apache.iceberg.types.Types.NestedField.required;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.After;
+import org.junit.Test;
+
+public class TestHMSCatalog extends HMSTestBase {
+  public TestHMSCatalog() {
+    super();
+  }
+  
+  @Before
+  @Override
+  public void setUp() throws Exception {
+      super.setUp();
+  }
+  
+  @After
+  @Override
+  public void tearDown() throws Exception {
+      super.tearDown();
+  }
+
+  @Test
+  public void testCreateNamespaceHttp() throws Exception {
+    String ns = "nstesthttp";
+    // list namespaces
+    URL url = new URL("http://hive@localhost:"; + catalogPort + 
"/"+catalogPath+"/v1/namespaces");
+    String jwt = generateJWT();
+    // check namespaces list (ie 0)
+    Object response = clientCall(jwt, url, "GET", null);
+    Assert.assertTrue(response instanceof Map);
+    Map<String, Object> nsrep = (Map<String, Object>) response;
+    List<String> nslist = (List<String>) nsrep.get("namespaces");
+    Assert.assertEquals(2, nslist.size());
+    Assert.assertTrue((nslist.contains(Arrays.asList("default"))));
+    Assert.assertTrue((nslist.contains(Arrays.asList("hivedb"))));
+    // succeed
+    response = clientCall(jwt, url, "POST", false, "{ \"namespace\" : [ 
\""+ns+"\" ], "+
+            "\"properties\":{ \"owner\": \"apache\", \"group\" : \"iceberg\" }"
+            +"}");
+    Assert.assertNotNull(response);
+    HiveMetaStoreClient client = createClient(conf, port);
+    Database database1 = client.getDatabase(ns);
+    Assert.assertEquals("apache", database1.getParameters().get("owner"));
+    Assert.assertEquals("iceberg", database1.getParameters().get("group"));
+
+    List<TableIdentifier> tis = catalog.listTables(Namespace.of(ns));
+    Assert.assertTrue(tis.isEmpty());
+
+    // list tables in hivedb
+    url = new URL("http://hive@localhost:"; + catalogPort + "/" + 
catalogPath+"/v1/namespaces/" + ns + "/tables");
+    // succeed
+    response = clientCall(jwt, url, "GET", null);
+    Assert.assertNotNull(response);
+
+    // quick check on metrics
+    Map<String, Long> counters = reportMetricCounters("list_namespaces", 
"list_tables");
+    counters.entrySet().forEach(m->{
+      Assert.assertTrue(m.getKey(), m.getValue() > 0);
+    });
+  }
+  
+  private Schema getTestSchema() {
+    return new Schema(
+        required(1, "id", Types.IntegerType.get(), "unique ID"),
+        required(2, "data", Types.StringType.get()));
+  }
+
+  
+  @Test
+  public void testCreateTableTxnBuilder() throws Exception {
+    Schema schema = getTestSchema();
+    final String tblName = "tbl_" + Integer.toHexString(RND.nextInt(65536));
+    final TableIdentifier tableIdent = TableIdentifier.of(DB_NAME, tblName);
+    String location = temp.newFolder(tableIdent.toString()).toString();
+
+    try {
+      Transaction txn = catalog.buildTable(tableIdent, schema)
+          .withLocation(location)
+          .createTransaction();
+      txn.commitTransaction();
+      Table table = catalog.loadTable(tableIdent);
+
+      Assert.assertEquals(location, table.location());
+      Assert.assertEquals(2, table.schema().columns().size());
+      Assert.assertTrue(table.spec().isUnpartitioned());
+      List<TableIdentifier> tis = catalog.listTables(Namespace.of(DB_NAME));
+      Assert.assertFalse(tis.isEmpty());
+
+      // list namespaces
+      URL url = new URL("http://hive@localhost:"; + catalogPort + 
"/"+catalogPath+"/v1/namespaces");
+      String jwt = generateJWT();
+      // succeed
+      Object response = clientCall(jwt, url, "GET", null);
+      Assert.assertNotNull(response);
+
+      // list tables in hivedb
+      url = new URL("http://hive@localhost:"; + catalogPort + "/" + 
catalogPath+"/v1/namespaces/" + DB_NAME + "/tables");
+      // succeed
+      response = clientCall(jwt, url, "GET", null);
+      Assert.assertNotNull(response);
+
+      // load table
+      url = new URL("http://hive@localhost:"; + catalogPort + "/" + 
catalogPath+"/v1/namespaces/" + DB_NAME + "/tables/" + tblName);
+      // succeed
+      response = clientCall(jwt, url, "GET", null);
+      Assert.assertNotNull(response);
+      String str = new Gson().toJson(response);
+
+      // quick check on metrics
+      Map<String, Long> counters = reportMetricCounters("list_namespaces", 
"list_tables", "load_table");
+      counters.forEach((key, value) -> Assert.assertTrue(key, value > 0));
+      table = catalog.loadTable(tableIdent);
+      Assert.assertNotNull(table);
+    } catch (Exception xany) {
+        String str = xany.getMessage();
+    } finally {
+        //metastoreClient.dropTable(DB_NAME, tblName);

Review Comment:
   this is commented-out code, can you please drop it



##########
standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java:
##########
@@ -0,0 +1,158 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import com.google.gson.Gson;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.Table;
+import org.apache.iceberg.Transaction;
+import org.apache.iceberg.catalog.Namespace;
+import org.apache.iceberg.catalog.TableIdentifier;
+import org.apache.iceberg.types.Types;
+import static org.apache.iceberg.types.Types.NestedField.required;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.After;
+import org.junit.Test;
+
+public class TestHMSCatalog extends HMSTestBase {
+  public TestHMSCatalog() {
+    super();
+  }
+  
+  @Before
+  @Override
+  public void setUp() throws Exception {
+      super.setUp();
+  }
+  
+  @After
+  @Override
+  public void tearDown() throws Exception {
+      super.tearDown();
+  }
+
+  @Test
+  public void testCreateNamespaceHttp() throws Exception {
+    String ns = "nstesthttp";
+    // list namespaces
+    URL url = new URL("http://hive@localhost:"; + catalogPort + 
"/"+catalogPath+"/v1/namespaces");
+    String jwt = generateJWT();
+    // check namespaces list (ie 0)
+    Object response = clientCall(jwt, url, "GET", null);
+    Assert.assertTrue(response instanceof Map);
+    Map<String, Object> nsrep = (Map<String, Object>) response;
+    List<String> nslist = (List<String>) nsrep.get("namespaces");
+    Assert.assertEquals(2, nslist.size());
+    Assert.assertTrue((nslist.contains(Arrays.asList("default"))));
+    Assert.assertTrue((nslist.contains(Arrays.asList("hivedb"))));
+    // succeed
+    response = clientCall(jwt, url, "POST", false, "{ \"namespace\" : [ 
\""+ns+"\" ], "+
+            "\"properties\":{ \"owner\": \"apache\", \"group\" : \"iceberg\" }"
+            +"}");
+    Assert.assertNotNull(response);
+    HiveMetaStoreClient client = createClient(conf, port);
+    Database database1 = client.getDatabase(ns);
+    Assert.assertEquals("apache", database1.getParameters().get("owner"));
+    Assert.assertEquals("iceberg", database1.getParameters().get("group"));
+
+    List<TableIdentifier> tis = catalog.listTables(Namespace.of(ns));
+    Assert.assertTrue(tis.isEmpty());
+
+    // list tables in hivedb
+    url = new URL("http://hive@localhost:"; + catalogPort + "/" + 
catalogPath+"/v1/namespaces/" + ns + "/tables");
+    // succeed
+    response = clientCall(jwt, url, "GET", null);
+    Assert.assertNotNull(response);
+
+    // quick check on metrics
+    Map<String, Long> counters = reportMetricCounters("list_namespaces", 
"list_tables");
+    counters.entrySet().forEach(m->{
+      Assert.assertTrue(m.getKey(), m.getValue() > 0);
+    });
+  }
+  
+  private Schema getTestSchema() {
+    return new Schema(
+        required(1, "id", Types.IntegerType.get(), "unique ID"),
+        required(2, "data", Types.StringType.get()));
+  }
+
+  
+  @Test
+  public void testCreateTableTxnBuilder() throws Exception {
+    Schema schema = getTestSchema();
+    final String tblName = "tbl_" + Integer.toHexString(RND.nextInt(65536));
+    final TableIdentifier tableIdent = TableIdentifier.of(DB_NAME, tblName);
+    String location = temp.newFolder(tableIdent.toString()).toString();
+
+    try {
+      Transaction txn = catalog.buildTable(tableIdent, schema)
+          .withLocation(location)
+          .createTransaction();
+      txn.commitTransaction();
+      Table table = catalog.loadTable(tableIdent);
+
+      Assert.assertEquals(location, table.location());
+      Assert.assertEquals(2, table.schema().columns().size());
+      Assert.assertTrue(table.spec().isUnpartitioned());
+      List<TableIdentifier> tis = catalog.listTables(Namespace.of(DB_NAME));
+      Assert.assertFalse(tis.isEmpty());
+
+      // list namespaces
+      URL url = new URL("http://hive@localhost:"; + catalogPort + 
"/"+catalogPath+"/v1/namespaces");
+      String jwt = generateJWT();
+      // succeed
+      Object response = clientCall(jwt, url, "GET", null);
+      Assert.assertNotNull(response);
+
+      // list tables in hivedb
+      url = new URL("http://hive@localhost:"; + catalogPort + "/" + 
catalogPath+"/v1/namespaces/" + DB_NAME + "/tables");
+      // succeed
+      response = clientCall(jwt, url, "GET", null);
+      Assert.assertNotNull(response);
+
+      // load table
+      url = new URL("http://hive@localhost:"; + catalogPort + "/" + 
catalogPath+"/v1/namespaces/" + DB_NAME + "/tables/" + tblName);
+      // succeed
+      response = clientCall(jwt, url, "GET", null);
+      Assert.assertNotNull(response);
+      String str = new Gson().toJson(response);
+
+      // quick check on metrics
+      Map<String, Long> counters = reportMetricCounters("list_namespaces", 
"list_tables", "load_table");
+      counters.forEach((key, value) -> Assert.assertTrue(key, value > 0));
+      table = catalog.loadTable(tableIdent);
+      Assert.assertNotNull(table);
+    } catch (Exception xany) {
+        String str = xany.getMessage();

Review Comment:
   never used



##########
standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/TestHMSCatalog.java:
##########
@@ -0,0 +1,158 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import com.google.gson.Gson;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.iceberg.Schema;
+import org.apache.iceberg.Table;
+import org.apache.iceberg.Transaction;
+import org.apache.iceberg.catalog.Namespace;
+import org.apache.iceberg.catalog.TableIdentifier;
+import org.apache.iceberg.types.Types;
+import static org.apache.iceberg.types.Types.NestedField.required;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.After;
+import org.junit.Test;
+
+public class TestHMSCatalog extends HMSTestBase {
+  public TestHMSCatalog() {
+    super();
+  }
+  
+  @Before
+  @Override
+  public void setUp() throws Exception {
+      super.setUp();
+  }
+  
+  @After
+  @Override
+  public void tearDown() throws Exception {
+      super.tearDown();
+  }
+
+  @Test
+  public void testCreateNamespaceHttp() throws Exception {
+    String ns = "nstesthttp";
+    // list namespaces
+    URL url = new URL("http://hive@localhost:"; + catalogPort + 
"/"+catalogPath+"/v1/namespaces");
+    String jwt = generateJWT();
+    // check namespaces list (ie 0)
+    Object response = clientCall(jwt, url, "GET", null);
+    Assert.assertTrue(response instanceof Map);
+    Map<String, Object> nsrep = (Map<String, Object>) response;
+    List<String> nslist = (List<String>) nsrep.get("namespaces");
+    Assert.assertEquals(2, nslist.size());
+    Assert.assertTrue((nslist.contains(Arrays.asList("default"))));
+    Assert.assertTrue((nslist.contains(Arrays.asList("hivedb"))));

Review Comment:
   `nslist` is a list of type `String`, how does this contain a list? How is 
this passing? Am I missing something here



##########
standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java:
##########
@@ -0,0 +1,425 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.MetricRegistry;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.ok;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.ToNumberPolicy;
+import com.google.gson.ToNumberStrategy;
+import com.nimbusds.jose.JWSAlgorithm;
+import com.nimbusds.jose.JWSHeader;
+import com.nimbusds.jose.JWSSigner;
+import com.nimbusds.jose.crypto.RSASSASigner;
+import com.nimbusds.jose.jwk.RSAKey;
+import com.nimbusds.jwt.JWTClaimsSet;
+import com.nimbusds.jwt.SignedJWT;
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaException;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfo;
+import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.metrics.Metrics;
+import org.apache.hadoop.hive.metastore.properties.HMSPropertyManager;
+import org.apache.hadoop.hive.metastore.properties.PropertyManager;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+import org.apache.iceberg.catalog.Catalog;
+import org.apache.iceberg.catalog.SupportsNamespaces;
+import org.eclipse.jetty.server.Server;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.rules.TemporaryFolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class HMSTestBase {
+  protected static final Logger LOG = 
LoggerFactory.getLogger(HMSTestBase.class.getName());
+  protected static final String BASE_DIR = System.getProperty("basedir");
+  protected static Random RND = new Random(20230922);
+  protected static final String USER_1 = "USER_1";
+  protected static final String DB_NAME = "hivedb";
+
+  protected static final long EVICTION_INTERVAL = 
TimeUnit.SECONDS.toMillis(10);
+  private static final File JWT_AUTHKEY_FILE =
+      new File(BASE_DIR,"src/test/resources/auth/jwt/jwt-authorized-key.json");
+  protected static final File JWT_NOAUTHKEY_FILE =
+      new 
File(BASE_DIR,"src/test/resources/auth/jwt/jwt-unauthorized-key.json");
+  protected static final File JWT_JWKS_FILE =
+      new 
File(BASE_DIR,"src/test/resources/auth/jwt/jwt-verification-jwks.json");
+  protected static final int MOCK_JWKS_SERVER_PORT = 8089;
+  @ClassRule
+  public static final WireMockRule MOCK_JWKS_SERVER = new 
WireMockRule(MOCK_JWKS_SERVER_PORT);
+
+
+  public static class TestSchemaInfo extends MetaStoreSchemaInfo {
+    public TestSchemaInfo(String metastoreHome, String dbType) throws 
HiveMetaException {
+      super(metastoreHome, dbType);
+    }
+    @Override
+    public String getMetaStoreScriptDir() {
+      return  new File(BASE_DIR,"src/test/resources").getAbsolutePath() + 
File.separatorChar +
+          "scripts" + File.separatorChar + "metastore" +
+          File.separatorChar + "upgrade" + File.separatorChar + dbType;
+    }
+  }
+
+  @Rule
+  public TemporaryFolder temp = new TemporaryFolder();
+
+  protected Configuration conf = null;
+  protected String NS = "hms" + RND.nextInt(100);
+
+  protected int port = -1;
+  protected int catalogPort = -1;
+  protected final String catalogPath = "hmscatalog";
+  protected static final int WAIT_FOR_SERVER = 5000;
+  // for direct calls
+  protected Catalog catalog;
+  protected SupportsNamespaces nsCatalog;
+
+  protected int createMetastoreServer(Configuration conf) throws Exception {
+    return 
MetaStoreTestUtils.startMetaStoreWithRetry(HadoopThriftAuthBridge.getBridge(), 
conf);
+  }
+
+  protected void stopMetastoreServer(int port) {
+    MetaStoreTestUtils.close(port);
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    NS = "hms" + RND.nextInt(100);
+    conf = MetastoreConf.newMetastoreConf();
+    MetaStoreTestUtils.setConfForStandloneMode(conf);
+    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.CAPABILITY_CHECK, 
false);
+    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST, true);
+    // new 2024-10-02
+    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.SCHEMA_VERIFICATION, 
false);
+
+    conf.setBoolean(MetastoreConf.ConfVars.METRICS_ENABLED.getVarname(), true);
+    // "hive.metastore.warehouse.dir"
+    String whpath = new 
File(BASE_DIR,"target/tmp/warehouse/managed").toURI()/*.getAbsolutePath()*/.toString();
+    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.WAREHOUSE, whpath);
+    HiveConf.setVar(conf, HiveConf.ConfVars.METASTORE_WAREHOUSE, whpath);
+    // "hive.metastore.warehouse.external.dir"
+    String extwhpath = new 
File(BASE_DIR,"target/tmp/warehouse/external").toURI()/*.getAbsolutePath()*/.toString();
+    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.WAREHOUSE_EXTERNAL, 
extwhpath);
+    conf.set(HiveConf.ConfVars.HIVE_METASTORE_WAREHOUSE_EXTERNAL.varname, 
extwhpath);

Review Comment:
   is this required? I thought this was deprecated in favor of the above config; 
if so, maybe we need to fix that in a separate ticket



##########
standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java:
##########
@@ -911,6 +911,8 @@ public enum ConfVars {
     HMS_HANDLER_PROXY_CLASS("metastore.hmshandler.proxy", 
"hive.metastore.hmshandler.proxy",
         METASTORE_RETRYING_HANDLER_CLASS,
         "The proxy class name of HMSHandler, default is RetryingHMSHandler."),
+    HMS_HANDLER_CREATE("metastore.hmshandler.create", 
"metastore.hmshandler.create","newHMSHandler",
+            "The method name to create new HMSHandler"),

Review Comment:
   Can you add the possible values? I think ``newHMSRetryingLocalHandler`` is 
another one. Please add a line describing each of them (or the difference 
between them) for users, and maybe add a StringSetValidator to reject any 
other value — unless we have a use case where we expect other values to work 
as well.



##########
standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/conf/MetastoreConf.java:
##########
@@ -1825,6 +1827,20 @@ public enum ConfVars {
         "hive.metastore.properties.servlet.auth", "jwt",
         "Property-maps servlet authentication method (simple or jwt)."
     ),
+    ICEBERG_CATALOG_SERVLET_PATH("hive.metastore.catalog.servlet.path",
+        "hive.metastore.catalog.servlet.path", "icecli",
+        "HMS Iceberg Catalog servlet path component of URL endpoint."
+    ),
+    ICEBERG_CATALOG_SERVLET_PORT("hive.metastore.catalog.servlet.port",
+        "hive.metastore.catalog.servlet.port", -1,
+        "HMS Iceberg Catalog servlet server port. Negative value disables the 
servlet," +
+            " 0 will let the system determine the catalog server port," +
+            " positive value will be used as-is."
+    ),
+    ICEBERG_CATALOG_SERVLET_AUTH("hive.metastore.catalog.servlet.auth",
+        "hive.metastore.catalog.servlet.auth", "jwt",
+        "HMS Iceberg Catalog servlet authentication method (simple or jwt)."

Review Comment:
   If only two values are possible, then maybe use a ``StringSetValidator`` to validate the input.



##########
standalone-metastore/metastore-catalog/src/test/java/org/apache/iceberg/rest/HMSTestBase.java:
##########
@@ -0,0 +1,425 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import com.codahale.metrics.Counter;
+import com.codahale.metrics.MetricRegistry;
+import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.ok;
+import com.github.tomakehurst.wiremock.junit.WireMockRule;
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import com.google.gson.ToNumberPolicy;
+import com.google.gson.ToNumberStrategy;
+import com.nimbusds.jose.JWSAlgorithm;
+import com.nimbusds.jose.JWSHeader;
+import com.nimbusds.jose.JWSSigner;
+import com.nimbusds.jose.crypto.RSASSASigner;
+import com.nimbusds.jose.jwk.RSAKey;
+import com.nimbusds.jwt.JWTClaimsSet;
+import com.nimbusds.jwt.SignedJWT;
+import java.io.BufferedReader;
+import java.io.DataOutputStream;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.Date;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.UUID;
+import java.util.concurrent.TimeUnit;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaException;
+import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.MetaStoreSchemaInfo;
+import org.apache.hadoop.hive.metastore.MetaStoreTestUtils;
+import org.apache.hadoop.hive.metastore.ObjectStore;
+import org.apache.hadoop.hive.metastore.Warehouse;
+import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.metrics.Metrics;
+import org.apache.hadoop.hive.metastore.properties.HMSPropertyManager;
+import org.apache.hadoop.hive.metastore.properties.PropertyManager;
+import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+import org.apache.iceberg.catalog.Catalog;
+import org.apache.iceberg.catalog.SupportsNamespaces;
+import org.eclipse.jetty.server.Server;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.rules.TemporaryFolder;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public abstract class HMSTestBase {
+  protected static final Logger LOG = 
LoggerFactory.getLogger(HMSTestBase.class.getName());
+  protected static final String BASE_DIR = System.getProperty("basedir");
+  protected static Random RND = new Random(20230922);
+  protected static final String USER_1 = "USER_1";
+  protected static final String DB_NAME = "hivedb";
+
+  protected static final long EVICTION_INTERVAL = 
TimeUnit.SECONDS.toMillis(10);
+  private static final File JWT_AUTHKEY_FILE =
+      new File(BASE_DIR,"src/test/resources/auth/jwt/jwt-authorized-key.json");
+  protected static final File JWT_NOAUTHKEY_FILE =
+      new 
File(BASE_DIR,"src/test/resources/auth/jwt/jwt-unauthorized-key.json");
+  protected static final File JWT_JWKS_FILE =
+      new 
File(BASE_DIR,"src/test/resources/auth/jwt/jwt-verification-jwks.json");
+  protected static final int MOCK_JWKS_SERVER_PORT = 8089;
+  @ClassRule
+  public static final WireMockRule MOCK_JWKS_SERVER = new 
WireMockRule(MOCK_JWKS_SERVER_PORT);
+
+
+  public static class TestSchemaInfo extends MetaStoreSchemaInfo {
+    public TestSchemaInfo(String metastoreHome, String dbType) throws 
HiveMetaException {
+      super(metastoreHome, dbType);
+    }
+    @Override
+    public String getMetaStoreScriptDir() {
+      return  new File(BASE_DIR,"src/test/resources").getAbsolutePath() + 
File.separatorChar +
+          "scripts" + File.separatorChar + "metastore" +
+          File.separatorChar + "upgrade" + File.separatorChar + dbType;
+    }
+  }
+
+  @Rule
+  public TemporaryFolder temp = new TemporaryFolder();
+
+  protected Configuration conf = null;
+  protected String NS = "hms" + RND.nextInt(100);
+
+  protected int port = -1;
+  protected int catalogPort = -1;
+  protected final String catalogPath = "hmscatalog";
+  protected static final int WAIT_FOR_SERVER = 5000;
+  // for direct calls
+  protected Catalog catalog;
+  protected SupportsNamespaces nsCatalog;
+
+  protected int createMetastoreServer(Configuration conf) throws Exception {
+    return 
MetaStoreTestUtils.startMetaStoreWithRetry(HadoopThriftAuthBridge.getBridge(), 
conf);
+  }
+
+  protected void stopMetastoreServer(int port) {
+    MetaStoreTestUtils.close(port);
+  }
+
+  @Before
+  public void setUp() throws Exception {
+    NS = "hms" + RND.nextInt(100);
+    conf = MetastoreConf.newMetastoreConf();
+    MetaStoreTestUtils.setConfForStandloneMode(conf);
+    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.CAPABILITY_CHECK, 
false);
+    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.HIVE_IN_TEST, true);
+    // new 2024-10-02
+    MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.SCHEMA_VERIFICATION, 
false);
+
+    conf.setBoolean(MetastoreConf.ConfVars.METRICS_ENABLED.getVarname(), true);
+    // "hive.metastore.warehouse.dir"
+    String whpath = new 
File(BASE_DIR,"target/tmp/warehouse/managed").toURI()/*.getAbsolutePath()*/.toString();
+    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.WAREHOUSE, whpath);
+    HiveConf.setVar(conf, HiveConf.ConfVars.METASTORE_WAREHOUSE, whpath);
+    // "hive.metastore.warehouse.external.dir"
+    String extwhpath = new 
File(BASE_DIR,"target/tmp/warehouse/external").toURI()/*.getAbsolutePath()*/.toString();
+    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.WAREHOUSE_EXTERNAL, 
extwhpath);
+    conf.set(HiveConf.ConfVars.HIVE_METASTORE_WAREHOUSE_EXTERNAL.varname, 
extwhpath);
+
+    MetastoreConf.setVar(conf, MetastoreConf.ConfVars.SCHEMA_INFO_CLASS, 
"org.apache.iceberg.rest.HMSTestBase$TestSchemaInfo");
+    // Events that get cleaned happen in batches of 1 to exercise batching code
+    MetastoreConf.setLongVar(conf, 
MetastoreConf.ConfVars.EVENT_CLEAN_MAX_EVENTS, 1L);
+    MetastoreConf.setLongVar(conf, 
MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PORT, 0);
+    MetastoreConf.setVar(conf, 
MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_AUTH, "jwt");
+    MetastoreConf.setVar(conf, 
MetastoreConf.ConfVars.ICEBERG_CATALOG_SERVLET_PATH, catalogPath);
+    MetastoreConf.setVar(conf, 
MetastoreConf.ConfVars.THRIFT_METASTORE_AUTHENTICATION_JWT_JWKS_URL,
+        "http://localhost:"; + MOCK_JWKS_SERVER_PORT + "/jwks");
+    MOCK_JWKS_SERVER.stubFor(get("/jwks")
+        .willReturn(ok()
+            .withBody(Files.readAllBytes(JWT_JWKS_FILE.toPath()))));
+    Metrics.initialize(conf);
+    // The server
+    port = createMetastoreServer(conf);
+    System.out.println("Starting MetaStore Server on port " + port);
+    // The manager decl
+    PropertyManager.declare(NS, HMSPropertyManager.class);
+    // The client
+    HiveMetaStoreClient client = createClient(conf, port);
+    Assert.assertNotNull("Unable to connect to the MetaStore server", client);
+
+    // create a managed root
+    Warehouse wh = new Warehouse(conf);
+    String location = temp.newFolder("hivedb2023").getAbsolutePath();
+    Database db = new Database(DB_NAME, "catalog test", location, 
Collections.emptyMap());
+    client.createDatabase(db);
+
+    int[] aport = { -1 };
+    Catalog ice = acquireServer(aport);
+      catalog =  ice;
+      nsCatalog = catalog instanceof SupportsNamespaces? (SupportsNamespaces) 
catalog : null;
+      catalogPort = aport[0];
+  }
+  
+  private static String format(String format, Object... params) {
+    return org.slf4j.helpers.MessageFormatter.arrayFormat(format, 
params).getMessage();
+  }
+
+  private static Catalog acquireServer(int[] port) throws InterruptedException 
{
+    final int wait = 200;
+    Server iceServer = HiveMetaStore.getIcebergServer();
+    int tries = WAIT_FOR_SERVER / wait;
+    while(iceServer == null && tries-- > 0) {
+      Thread.sleep(wait);
+      iceServer = HiveMetaStore.getIcebergServer();
+    }
+    if (iceServer != null) {
+      port[0] = iceServer.getURI().getPort();
+      boolean starting;
+      tries = WAIT_FOR_SERVER / wait;
+      while((starting = iceServer.isStarting()) && tries-- > 0) {
+        Thread.sleep(wait);
+      }
+      if (starting) {
+        LOG.warn("server still starting after {}ms", WAIT_FOR_SERVER);
+      }
+      Catalog ice = HMSCatalogServer.getLastCatalog();
+      if (ice == null) {
+        throw new NullPointerException(format("unable to acquire catalog after 
{}ms", WAIT_FOR_SERVER));
+      }
+      return ice;
+    } else {
+      throw new NullPointerException(format("unable to acquire server after 
{}ms", WAIT_FOR_SERVER));

Review Comment:
   Throwing an NPE seems a bit off to my eyes; can we throw a RuntimeException 
or something more descriptive instead?



##########
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java:
##########
@@ -120,12 +122,17 @@ public class HiveMetaStore extends ThriftHiveMetastore {
   private static String msHost = null;
   private static ThriftServer thriftServer;
   private static Server propertyServer = null;
+  private static Server icebergServer = null;

Review Comment:
   nit
   Can we rename this to `icebergRestServer`?



##########
standalone-metastore/metastore-catalog/src/main/java/org/apache/iceberg/rest/HMSCatalogAdapter.java:
##########
@@ -0,0 +1,672 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iceberg.rest;
+
+import com.codahale.metrics.Counter;
+import org.apache.hadoop.hive.metastore.metrics.Metrics;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+import org.apache.iceberg.BaseTable;
+import org.apache.iceberg.BaseTransaction;
+import org.apache.iceberg.Table;
+import org.apache.iceberg.Transaction;
+import org.apache.iceberg.Transactions;
+import org.apache.iceberg.catalog.Catalog;
+import org.apache.iceberg.catalog.Namespace;
+import org.apache.iceberg.catalog.SupportsNamespaces;
+import org.apache.iceberg.catalog.TableIdentifier;
+import org.apache.iceberg.exceptions.AlreadyExistsException;
+import org.apache.iceberg.exceptions.CommitFailedException;
+import org.apache.iceberg.exceptions.CommitStateUnknownException;
+import org.apache.iceberg.exceptions.ForbiddenException;
+import org.apache.iceberg.exceptions.NamespaceNotEmptyException;
+import org.apache.iceberg.exceptions.NoSuchIcebergTableException;
+import org.apache.iceberg.exceptions.NoSuchNamespaceException;
+import org.apache.iceberg.exceptions.NoSuchTableException;
+import org.apache.iceberg.exceptions.NotAuthorizedException;
+import org.apache.iceberg.exceptions.RESTException;
+import org.apache.iceberg.exceptions.UnprocessableEntityException;
+import org.apache.iceberg.exceptions.ValidationException;
+import org.apache.iceberg.relocated.com.google.common.base.Splitter;
+import org.apache.iceberg.relocated.com.google.common.collect.ImmutableMap;
+import org.apache.iceberg.relocated.com.google.common.collect.Lists;
+import org.apache.iceberg.rest.requests.CommitTransactionRequest;
+import org.apache.iceberg.rest.requests.CreateNamespaceRequest;
+import org.apache.iceberg.rest.requests.CreateTableRequest;
+import org.apache.iceberg.rest.requests.RegisterTableRequest;
+import org.apache.iceberg.rest.requests.RenameTableRequest;
+import org.apache.iceberg.rest.requests.ReportMetricsRequest;
+import org.apache.iceberg.rest.requests.UpdateNamespacePropertiesRequest;
+import org.apache.iceberg.rest.requests.UpdateTableRequest;
+import org.apache.iceberg.rest.responses.ConfigResponse;
+import org.apache.iceberg.rest.responses.CreateNamespaceResponse;
+import org.apache.iceberg.rest.responses.ErrorResponse;
+import org.apache.iceberg.rest.responses.GetNamespaceResponse;
+import org.apache.iceberg.rest.responses.ListNamespacesResponse;
+import org.apache.iceberg.rest.responses.ListTablesResponse;
+import org.apache.iceberg.rest.responses.LoadTableResponse;
+import org.apache.iceberg.rest.responses.OAuthTokenResponse;
+import org.apache.iceberg.rest.responses.UpdateNamespacePropertiesResponse;
+import org.apache.iceberg.util.Pair;
+import org.apache.iceberg.util.PropertyUtil;
+
+/**
+ * Original @ 
https://github.com/apache/iceberg/blob/main/core/src/test/java/org/apache/iceberg/rest/RESTCatalogAdapter.java
+ * Adaptor class to translate REST requests into {@link Catalog} API calls.
+ */
+public class HMSCatalogAdapter implements RESTClient {

Review Comment:
   This should extend ``RESTCatalogAdapter`` and override only the modified or 
new methods, rather than copying the entire code from that class.



##########
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java:
##########
@@ -732,10 +766,26 @@ public static void startMetaStore(int port, 
HadoopThriftAuthBridge bridge,
     }
     // optionally create and start the property server and servlet
     propertyServer = PropertyServlet.startServer(conf);
+    // optionally create and start the Iceberg REST server and servlet
+    icebergServer = startIcebergCatalog(conf);
 
     thriftServer.start();
   }
 
+  static Server startIcebergCatalog(Configuration configuration) {
+    try {
+      Class<?> iceClazz = 
Class.forName("org.apache.iceberg.rest.HMSCatalogServer");
+      Method iceStart = iceClazz.getMethod("startServer", Configuration.class);
+      return (Server) iceStart.invoke(null, configuration);
+    } catch (ClassNotFoundException xnf) {
+      LOG.warn("unable to start Iceberg REST Catalog server {}, missing jar?", 
xnf);

Review Comment:
   You haven't supplied a value for the {} placeholder; I believe xnf is 
treated as the trailing Throwable argument, not as a value for {}. Maybe just:
   ```
         LOG.warn("Unable to start Iceberg REST Catalog server, Missing Jar?", 
xnf);
   ```



##########
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java:
##########
@@ -732,10 +766,26 @@ public static void startMetaStore(int port, 
HadoopThriftAuthBridge bridge,
     }
     // optionally create and start the property server and servlet
     propertyServer = PropertyServlet.startServer(conf);
+    // optionally create and start the Iceberg REST server and servlet
+    icebergServer = startIcebergCatalog(conf);
 
     thriftServer.start();
   }
 
+  static Server startIcebergCatalog(Configuration configuration) {
+    try {
+      Class<?> iceClazz = 
Class.forName("org.apache.iceberg.rest.HMSCatalogServer");
+      Method iceStart = iceClazz.getMethod("startServer", Configuration.class);
+      return (Server) iceStart.invoke(null, configuration);
+    } catch (ClassNotFoundException xnf) {
+      LOG.warn("unable to start Iceberg REST Catalog server {}, missing jar?", 
xnf);
+      return null;
+    } catch (NoSuchMethodException | IllegalAccessException | 
InvocationTargetException e) {
+      LOG.error("unable to start Iceberg REST Catalog server {}", e);

Review Comment:
   Same as above — I don't think the {} placeholder is required:
   ```
         LOG.error("Unable to start Iceberg REST Catalog server", e);
   ```



##########
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/SecureServletCaller.java:
##########
@@ -0,0 +1,60 @@
+/* * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import java.io.IOException;
+
+/**
+ * Secures servlet processing.
+ */
+public interface SecureServletCaller {
+  /**
+   * Should be called in Servlet.init()
+   * @throws ServletException if the jwt validator creation throws an exception
+   */
+  public void init() throws ServletException;
+
+  /**
+   * Any http method executor.
+   */
+  @FunctionalInterface
+  interface MethodExecutor {
+    /**
+     * The method to call to secure the execution of a (http) method.
+     * @param request the request
+     * @param response the response
+     * @throws ServletException if the method executor fails
+     * @throws IOException if the Json in/out fail
+     */
+    void execute(HttpServletRequest request, HttpServletResponse response) 
throws ServletException, IOException;
+  }
+
+  /**
+   * The method to call to secure the execution of a (http) method.
+   * @param request the request
+   * @param response the response
+   * @param executor the method executor
+   */
+   void execute(HttpServletRequest request, HttpServletResponse response, 
MethodExecutor executor)

Review Comment:
   nit
   Can you add the @Override annotation to the implementing methods in the 
child classes?



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to