[ 
https://issues.apache.org/jira/browse/HIVE-21456?focusedWorklogId=755363&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-755363
 ]

ASF GitHub Bot logged work on HIVE-21456:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 11/Apr/22 18:15
            Start Date: 11/Apr/22 18:15
    Worklog Time Spent: 10m 
      Work Description: sourabh912 commented on code in PR #3105:
URL: https://github.com/apache/hive/pull/3105#discussion_r847609721


##########
standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestRemoteHiveHttpMetaStore.java:
##########
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore;
+
+import org.apache.hadoop.hive.metastore.annotation.MetastoreUnitTest;
+import org.junit.experimental.categories.Category;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.hive.metastore.annotation.MetastoreCheckinTest;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
+
+@Category(MetastoreCheckinTest.class)
+public class TestRemoteHiveHttpMetaStore extends TestRemoteHiveMetaStore {
+
+  private static final Logger LOG = LoggerFactory.getLogger(TestRemoteHiveHttpMetaStore.class);
+
+  @Override
+  public void start() throws Exception {
+    MetastoreConf.setVar(conf, ConfVars.THRIFT_TRANSPORT_MODE, "http");
+    LOG.info("Attempting to start test remote metastore in http mode");
+    super.start();
+    LOG.info("Successfully started test remote metastore in http mode");
+  }
+
+  @Override
+  protected HiveMetaStoreClient createClient() throws Exception {
+    MetastoreConf.setVar(conf, ConfVars.METASTORE_CLIENT_THRIFT_TRANSPORT_MODE, "http");
+    return super.createClient();
+  }
+}

Review Comment:
   Done
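
As a side note for readers following the review, below is a minimal, hypothetical sketch (not part of this patch) of how a caller might configure a HiveMetaStoreClient for the HTTP transport mode that this test exercises. The endpoint value passed to ConfVars.THRIFT_URIS is a placeholder and the exact URI form expected in HTTP mode is an assumption here; only the transport-mode switch itself comes from the test above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;

public class HttpMetaStoreClientSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = MetastoreConf.newMetastoreConf();
    // Placeholder endpoint; the real host, port and URI scheme depend on the deployment.
    MetastoreConf.setVar(conf, ConfVars.THRIFT_URIS, "http://localhost:9083");
    // Same switch the test flips: ask the client to speak Thrift over HTTP.
    MetastoreConf.setVar(conf, ConfVars.METASTORE_CLIENT_THRIFT_TRANSPORT_MODE, "http");
    HiveMetaStoreClient client = new HiveMetaStoreClient(conf);
    try {
      // Any metastore call can follow; listing databases is just a smoke test.
      System.out.println(client.getAllDatabases());
    } finally {
      client.close();
    }
  }
}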



##########
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HmsThriftHttpServlet.java:
##########
@@ -0,0 +1,116 @@
+/* * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.metastore;
+
+import java.io.IOException;
+import java.security.PrivilegedExceptionAction;
+import java.util.Enumeration;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.thrift.TProcessor;
+import org.apache.thrift.protocol.TProtocolFactory;
+import org.apache.thrift.server.TServlet;
+
+public class HmsThriftHttpServlet extends TServlet {
+
+  private static final Logger LOG = LoggerFactory
+      .getLogger(HmsThriftHttpServlet.class);
+
+  private static final String X_USER = MetaStoreUtils.USER_NAME_HTTP_HEADER;
+
+  private final boolean isSecurityEnabled;
+
+  public HmsThriftHttpServlet(TProcessor processor,
+      TProtocolFactory inProtocolFactory, TProtocolFactory outProtocolFactory) {
+    super(processor, inProtocolFactory, outProtocolFactory);
+    // This should ideally be receiving an instance of the Configuration which is used for the check
+    isSecurityEnabled = UserGroupInformation.isSecurityEnabled();
+  }
+
+  public HmsThriftHttpServlet(TProcessor processor,
+      TProtocolFactory protocolFactory) {
+    super(processor, protocolFactory);
+    isSecurityEnabled = UserGroupInformation.isSecurityEnabled();
+  }
+
+  @Override
+  protected void doPost(HttpServletRequest request,
+      HttpServletResponse response) throws ServletException, IOException {
+
+    Enumeration<String> headerNames = request.getHeaderNames();
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Logging headers in request");
+      while (headerNames.hasMoreElements()) {
+        String headerName = headerNames.nextElement();
+        LOG.debug("Header: [{}], Value: [{}]", headerName,
+            request.getHeader(headerName));
+      }
+    }
+    String userFromHeader = request.getHeader(X_USER);
+    if (userFromHeader == null || userFromHeader.isEmpty()) {
+      LOG.error("No user header: {} found", X_USER);
+      response.sendError(HttpServletResponse.SC_FORBIDDEN,
+          "User Header missing");
+      return;
+    }
+
+    // TODO: These should ideally be in some kind of a Cache with Weak references.
+    // If HMS were to set up some kind of a session, this would go into the session by having
+    // this filter work with a custom Processor / or set the username into the session
+    // as is done for HS2.
+    // In case of HMS, it looks like each request is independent, and there is no session
+    // information, so the UGI needs to be set up in the Connection layer itself.
+    UserGroupInformation clientUgi;
+    // Temporary, and useless for now. Here only to allow this to work on an otherwise kerberized
+    // server.
+    if (isSecurityEnabled) {
+      LOG.info("Creating proxy user for: {}", userFromHeader);
+      clientUgi = UserGroupInformation.createProxyUser(userFromHeader, UserGroupInformation.getLoginUser());
+    } else {
+      LOG.info("Creating remote user for: {}", userFromHeader);
+      clientUgi = UserGroupInformation.createRemoteUser(userFromHeader);
+    }
+
+
+    PrivilegedExceptionAction<Void> action = new PrivilegedExceptionAction<Void>() {
+      @Override
+      public Void run() throws Exception {
+        HmsThriftHttpServlet.super.doPost(request, response);
+        return null;
+      }
+    };
+
+    try {
+      clientUgi.doAs(action);
+    } catch (InterruptedException | RuntimeException e) {
+      // TODO: Exception handling likely needs to be better, so that the client
+      // can make better sense of what has gone wrong. Lookup what this looks like
+      // in the default thrift binary interface.
+      LOG.info("Exception while processing call", e);
+      response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
+          "C1C User Header missing");
+    }
+  }
+}

Review Comment:
   Done
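
For illustration only, here is a hypothetical sketch (not part of this patch) of a raw Thrift HTTP client that sets the user header this servlet reads before dispatching a call. The endpoint URL and path, the choice of TBinaryProtocol, the sample user name, and the assumption that MetaStoreUtils.USER_NAME_HTTP_HEADER is reachable from client code are all illustrative assumptions.

import org.apache.hadoop.hive.metastore.api.ThriftHiveMetastore;
import org.apache.hadoop.hive.metastore.utils.MetaStoreUtils;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.transport.THttpClient;

public class RawHttpMetastoreCallSketch {
  public static void main(String[] args) throws Exception {
    // Placeholder URL; the servlet's actual mount path and port are configured elsewhere.
    THttpClient transport = new THttpClient("http://localhost:9083/metastore");
    // Without this header the servlet above responds with 403 "User Header missing".
    transport.setCustomHeader(MetaStoreUtils.USER_NAME_HTTP_HEADER, "hive");
    transport.open();
    ThriftHiveMetastore.Client client =
        new ThriftHiveMetastore.Client(new TBinaryProtocol(transport));
    // The call below runs inside the UGI doAs() set up in doPost() above.
    System.out.println(client.get_all_databases());
    transport.close();
  }
}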





Issue Time Tracking
-------------------

    Worklog Id:     (was: 755363)
    Time Spent: 4.5h  (was: 4h 20m)

> Hive Metastore Thrift over HTTP
> -------------------------------
>
>                 Key: HIVE-21456
>                 URL: https://issues.apache.org/jira/browse/HIVE-21456
>             Project: Hive
>          Issue Type: New Feature
>          Components: Metastore, Standalone Metastore
>            Reporter: Amit Khanna
>            Assignee: Sourabh Goyal
>            Priority: Major
>              Labels: pull-request-available
>         Attachments: HIVE-21456.2.patch, HIVE-21456.3.patch, 
> HIVE-21456.4.patch, HIVE-21456.patch
>
>          Time Spent: 4.5h
>  Remaining Estimate: 0h
>
> Hive Metastore currently has no support for HTTP transport, which makes it
> impossible to access it through Knox. Adding support for Thrift over HTTP
> transport will allow clients to access the metastore via Knox.



--
This message was sent by Atlassian Jira
(v8.20.1#820001)
