Repository: ambari
Updated Branches:
  refs/heads/branch-2.1.2 79dc49ae0 -> f54752960


AMBARI-13343. Ambari Hive View should be able to talk HTTP to HS2 (Gaurav Nagar 
via srimanth)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/f5475296
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/f5475296
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/f5475296

Branch: refs/heads/branch-2.1.2
Commit: f54752960fef17920d2b53f4f2187821a9068ac8
Parents: 79dc49a
Author: Srimanth Gunturi <sgunt...@hortonworks.com>
Authored: Mon Oct 19 06:50:43 2015 -0700
Committer: Srimanth Gunturi <sgunt...@hortonworks.com>
Committed: Mon Oct 19 11:19:11 2015 -0700

----------------------------------------------------------------------
 contrib/views/hive/pom.xml                      |  12 +-
 .../ambari/view/hive/client/Connection.java     | 262 ++++++++++++++++++-
 .../view/hive/client/ConnectionFactory.java     |  11 +-
 .../hive/client/HttpBasicAuthInterceptor.java   |  55 ++++
 .../client/HttpKerberosRequestInterceptor.java  |  72 +++++
 .../hive/client/HttpRequestInterceptorBase.java |  88 +++++++
 .../apache/ambari/view/hive/client/Utils.java   |  47 +++-
 contrib/views/hive/src/main/resources/view.xml  |  30 +++
 .../ambari/view/hive/client/ConnectionTest.java |   6 +-
 .../hive/resources/jobs/JobLDAPServiceTest.java |   5 +-
 10 files changed, 578 insertions(+), 10 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/f5475296/contrib/views/hive/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive/pom.xml b/contrib/views/hive/pom.xml
index 3a1cf6d..a78eea2 100644
--- a/contrib/views/hive/pom.xml
+++ b/contrib/views/hive/pom.xml
@@ -14,7 +14,7 @@
    See the License for the specific language governing permissions and
    limitations under the License.
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xmlns="http://maven.apache.org/POM/4.0.0";
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
   <modelVersion>4.0.0</modelVersion>
   <groupId>org.apache.ambari.contrib.views</groupId>
@@ -199,6 +199,16 @@
       <artifactId>commons-io</artifactId>
       <version>2.4</version>
     </dependency>
+    <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpclient</artifactId>
+      <version>4.5.1</version>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpcore</artifactId>
+      <version>4.4.3</version>
+    </dependency>
   </dependencies>
 
   <properties>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5475296/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
 
b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
index d93df24..0e42d99 100644
--- 
a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
+++ 
b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Connection.java
@@ -27,16 +27,38 @@ import org.apache.hive.service.auth.KerberosSaslHelper;
 import org.apache.hive.service.auth.PlainSaslHelper;
 import org.apache.hive.service.auth.SaslQOP;
 import org.apache.hive.service.cli.thrift.*;
+import org.apache.http.HttpRequestInterceptor;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.CookieStore;
+import org.apache.http.client.ServiceUnavailableRetryStrategy;
+import org.apache.http.config.Registry;
+import org.apache.http.config.RegistryBuilder;
+import org.apache.http.conn.socket.ConnectionSocketFactory;
+import org.apache.http.conn.ssl.SSLSocketFactory;
+import org.apache.http.impl.client.BasicCookieStore;
+import org.apache.http.impl.client.CloseableHttpClient;
+import org.apache.http.impl.client.HttpClientBuilder;
+import org.apache.http.impl.client.HttpClients;
+import org.apache.http.impl.conn.BasicHttpClientConnectionManager;
+import org.apache.http.protocol.HttpContext;
 import org.apache.thrift.TException;
 import org.apache.thrift.protocol.TBinaryProtocol;
+import org.apache.thrift.transport.THttpClient;
 import org.apache.thrift.transport.TTransport;
 import org.apache.thrift.transport.TTransportException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import javax.net.ssl.KeyManagerFactory;
+import javax.net.ssl.SSLContext;
+import javax.net.ssl.TrustManagerFactory;
 import javax.security.sasl.Sasl;
 import javax.security.sasl.SaslException;
+import java.io.FileInputStream;
 import java.io.IOException;
+import java.security.KeyStore;
+import java.security.SecureRandom;
+import java.sql.SQLException;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
@@ -81,12 +103,14 @@ public class Connection {
 
   public synchronized void openConnection() throws HiveClientException, 
HiveAuthRequiredException {
     try {
-      transport = getTransport();
+      transport = isHttpTransportMode() ? createHttpTransport() : 
createBinaryTransport();
       transport.open();
       client = new TCLIService.Client(new TBinaryProtocol(transport));
     } catch (TTransportException e) {
       throw new HiveClientException("H020 Could not establish connecton to "
           + host + ":" + port + ": " + e.toString(), e);
+    } catch (SQLException e) {
+      throw new HiveClientException(e.getMessage(), e);
     }
     LOG.info("Hive connection opened");
   }
@@ -97,7 +121,7 @@ public class Connection {
    * @return transport
    * @throws HiveClientException
    */
-  protected TTransport getTransport() throws HiveClientException, 
TTransportException, HiveAuthRequiredException {
+  protected TTransport createBinaryTransport() throws HiveClientException, 
TTransportException, HiveAuthRequiredException {
     TTransport transport;
     boolean assumeSubject =
         
Utils.HiveAuthenticationParams.AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equals(authParams
@@ -171,6 +195,195 @@ public class Connection {
     return transport;
   }
 
+  private String getServerHttpUrl(boolean useSsl) {
+    // Create the http/https url
+    // JDBC driver will set up an https url if ssl is enabled, otherwise http
+    String schemeName = useSsl ? "https" : "http";
+    // http path should begin with "/"
+    String httpPath;
+    httpPath = authParams.get(Utils.HiveAuthenticationParams.HTTP_PATH);
+    if (httpPath == null) {
+      httpPath = "/";
+    } else if (!httpPath.startsWith("/")) {
+      httpPath = "/" + httpPath;
+    }
+    return schemeName + "://" + host + ":" + port + httpPath;
+  }
+
+  private TTransport createHttpTransport() throws SQLException, 
TTransportException {
+    CloseableHttpClient httpClient;
+    boolean useSsl = isSslConnection();
+    // Create an http client from the configs
+    httpClient = getHttpClient(useSsl);
+    try {
+      transport = new THttpClient(getServerHttpUrl(useSsl), httpClient);
+      // We'll call an open/close here to send a test HTTP message to the 
server. Any
+      // TTransportException caused by trying to connect to a non-available 
peer are thrown here.
+      // Bubbling them up the call hierarchy so that a retry can happen in 
openTransport,
+      // if dynamic service discovery is configured.
+      TCLIService.Iface client = new TCLIService.Client(new 
TBinaryProtocol(transport));
+      TOpenSessionResp openResp = client.OpenSession(new TOpenSessionReq());
+      if (openResp != null) {
+        client.CloseSession(new TCloseSessionReq(openResp.getSessionHandle()));
+      }
+    } catch (TException e) {
+      LOG.info("JDBC Connection Parameters used : useSSL = " + useSsl + " , 
httpPath  = " +
+          authParams.get(Utils.HiveAuthenticationParams.HTTP_PATH) + " 
Authentication type = " +
+          authParams.get(Utils.HiveAuthenticationParams.AUTH_TYPE));
+      String msg = "Could not create http connection to " +
+          getServerHttpUrl(useSsl) + ". " + e.getMessage();
+      throw new TTransportException(msg, e);
+    }
+    return transport;
+  }
+
+  private CloseableHttpClient getHttpClient(Boolean useSsl) throws 
SQLException {
+    boolean isCookieEnabled = 
authParams.get(Utils.HiveAuthenticationParams.COOKIE_AUTH) == null ||
+        (!Utils.HiveAuthenticationParams.COOKIE_AUTH_FALSE.equalsIgnoreCase(
+            authParams.get(Utils.HiveAuthenticationParams.COOKIE_AUTH)));
+    String cookieName = 
authParams.get(Utils.HiveAuthenticationParams.COOKIE_NAME) == null ?
+        Utils.HiveAuthenticationParams.DEFAULT_COOKIE_NAMES_HS2 :
+        authParams.get(Utils.HiveAuthenticationParams.COOKIE_NAME);
+    CookieStore cookieStore = isCookieEnabled ? new BasicCookieStore() : null;
+    HttpClientBuilder httpClientBuilder;
+    // Request interceptor for any request pre-processing logic
+    HttpRequestInterceptor requestInterceptor;
+    Map<String, String> additionalHttpHeaders = new HashMap<String, String>();
+
+    // Retrieve the additional HttpHeaders
+    for (Map.Entry<String, String> entry : authParams.entrySet()) {
+      String key = entry.getKey();
+
+      if (key.startsWith(Utils.HiveAuthenticationParams.HTTP_HEADER_PREFIX)) {
+        
additionalHttpHeaders.put(key.substring(Utils.HiveAuthenticationParams.HTTP_HEADER_PREFIX.length()),
+            entry.getValue());
+      }
+    }
+    // Configure http client for kerberos/password based authentication
+    if (isKerberosAuthMode()) {
+      /**
+       * Add an interceptor which sets the appropriate header in the request.
+       * It does the kerberos authentication and get the final service ticket,
+       * for sending to the server before every request.
+       * In https mode, the entire information is encrypted
+       */
+
+      Boolean assumeSubject =
+          
Utils.HiveAuthenticationParams.AUTH_KERBEROS_AUTH_TYPE_FROM_SUBJECT.equals(authParams
+              .get(Utils.HiveAuthenticationParams.AUTH_KERBEROS_AUTH_TYPE));
+      requestInterceptor =
+          new 
HttpKerberosRequestInterceptor(authParams.get(Utils.HiveAuthenticationParams.AUTH_PRINCIPAL),
+              host, getServerHttpUrl(useSsl), assumeSubject, cookieStore, 
cookieName, useSsl,
+              additionalHttpHeaders);
+    } else {
+      /**
+       * Add an interceptor to pass username/password in the header.
+       * In https mode, the entire information is encrypted
+       */
+      requestInterceptor = new HttpBasicAuthInterceptor(
+          getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_USER, 
getUsername())
+          , getPassword(),cookieStore, cookieName, useSsl,
+          additionalHttpHeaders);
+    }
+    // Configure http client for cookie based authentication
+    if (isCookieEnabled) {
+      // Create a http client with a retry mechanism when the server returns a 
status code of 401.
+      httpClientBuilder =
+          HttpClients.custom().setServiceUnavailableRetryStrategy(
+              new ServiceUnavailableRetryStrategy() {
+
+                @Override
+                public boolean retryRequest(
+                    final HttpResponse response,
+                    final int executionCount,
+                    final HttpContext context) {
+                  int statusCode = response.getStatusLine().getStatusCode();
+                  boolean ret = statusCode == 401 && executionCount <= 1;
+
+                  // Set the context attribute to true which will be 
interpreted by the request interceptor
+                  if (ret) {
+                    context.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, 
Utils.HIVE_SERVER2_RETRY_TRUE);
+                  }
+                  return ret;
+                }
+
+                @Override
+                public long getRetryInterval() {
+                  // Immediate retry
+                  return 0;
+                }
+              });
+    } else {
+      httpClientBuilder = HttpClientBuilder.create();
+    }
+    // Add the request interceptor to the client builder
+    httpClientBuilder.addInterceptorFirst(requestInterceptor);
+    // Configure http client for SSL
+    if (useSsl) {
+      String useTwoWaySSL = 
authParams.get(Utils.HiveAuthenticationParams.USE_TWO_WAY_SSL);
+      String sslTrustStorePath = 
authParams.get(Utils.HiveAuthenticationParams.SSL_TRUST_STORE);
+      String sslTrustStorePassword = authParams.get(
+          Utils.HiveAuthenticationParams.SSL_TRUST_STORE_PASSWORD);
+      KeyStore sslTrustStore;
+      SSLSocketFactory socketFactory;
+
+      /**
+       * The code within the try block throws:
+       * 1. SSLInitializationException
+       * 2. KeyStoreException
+       * 3. IOException
+       * 4. NoSuchAlgorithmException
+       * 5. CertificateException
+       * 6. KeyManagementException
+       * 7. UnrecoverableKeyException
+       * We don't want the client to retry on any of these, hence we catch all
+       * and throw a SQLException.
+       */
+      try {
+        if (useTwoWaySSL != null &&
+            
useTwoWaySSL.equalsIgnoreCase(Utils.HiveAuthenticationParams.TRUE)) {
+          socketFactory = getTwoWaySSLSocketFactory();
+        } else if (sslTrustStorePath == null || sslTrustStorePath.isEmpty()) {
+          // Create a default socket factory based on standard JSSE trust 
material
+          socketFactory = SSLSocketFactory.getSocketFactory();
+        } else {
+          // Pick trust store config from the given path
+          sslTrustStore = 
KeyStore.getInstance(Utils.HiveAuthenticationParams.SSL_TRUST_STORE_TYPE);
+          try (FileInputStream fis = new FileInputStream(sslTrustStorePath)) {
+            sslTrustStore.load(fis, sslTrustStorePassword.toCharArray());
+          }
+          socketFactory = new SSLSocketFactory(sslTrustStore);
+        }
+        
socketFactory.setHostnameVerifier(SSLSocketFactory.ALLOW_ALL_HOSTNAME_VERIFIER);
+
+        final Registry<ConnectionSocketFactory> registry =
+            RegistryBuilder.<ConnectionSocketFactory>create()
+                .register("https", socketFactory)
+                .build();
+
+        httpClientBuilder.setConnectionManager(new 
BasicHttpClientConnectionManager(registry));
+      } catch (Exception e) {
+        String msg = "Could not create an https connection to " +
+            getServerHttpUrl(useSsl) + ". " + e.getMessage();
+        throw new SQLException(msg, " 08S01", e);
+      }
+    }
+    return httpClientBuilder.build();
+  }
+
+  private boolean isKerberosAuthMode() {
+    return 
!Utils.HiveAuthenticationParams.AUTH_SIMPLE.equals(authParams.get(Utils.HiveAuthenticationParams.AUTH_TYPE))
+        && 
authParams.containsKey(Utils.HiveAuthenticationParams.AUTH_PRINCIPAL);
+  }
+
+  private boolean isHttpTransportMode() {
+    String transportMode = 
authParams.get(Utils.HiveAuthenticationParams.TRANSPORT_MODE);
+    if (transportMode != null && (transportMode.equalsIgnoreCase("http"))) {
+      return true;
+    }
+    return false;
+  }
+
   private String getPassword() throws HiveAuthRequiredException {
     String password = 
getAuthParamDefault(Utils.HiveAuthenticationParams.AUTH_PASSWD, 
Utils.HiveAuthenticationParams.ANONYMOUS_USER);
     if (password.equals("${ask_password}")) {
@@ -183,6 +396,51 @@ public class Connection {
     return password;
   }
 
+  SSLSocketFactory getTwoWaySSLSocketFactory() throws SQLException {
+    SSLSocketFactory socketFactory = null;
+
+    try {
+      KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(
+          Utils.HiveAuthenticationParams.SUNX509_ALGORITHM_STRING,
+          Utils.HiveAuthenticationParams.SUNJSSE_ALGORITHM_STRING);
+      String keyStorePath = 
authParams.get(Utils.HiveAuthenticationParams.SSL_KEY_STORE);
+      String keyStorePassword = 
authParams.get(Utils.HiveAuthenticationParams.SSL_KEY_STORE_PASSWORD);
+      KeyStore sslKeyStore = 
KeyStore.getInstance(Utils.HiveAuthenticationParams.SSL_KEY_STORE_TYPE);
+
+      if (keyStorePath == null || keyStorePath.isEmpty()) {
+        throw new 
IllegalArgumentException(Utils.HiveAuthenticationParams.SSL_KEY_STORE
+            + " Not configured for 2 way SSL connection, keyStorePath param is 
empty");
+      }
+      try (FileInputStream fis = new FileInputStream(keyStorePath)) {
+        sslKeyStore.load(fis, keyStorePassword.toCharArray());
+      }
+      keyManagerFactory.init(sslKeyStore, keyStorePassword.toCharArray());
+
+      TrustManagerFactory trustManagerFactory = 
TrustManagerFactory.getInstance(
+          Utils.HiveAuthenticationParams.SUNX509_ALGORITHM_STRING);
+      String trustStorePath = 
authParams.get(Utils.HiveAuthenticationParams.SSL_TRUST_STORE);
+      String trustStorePassword = authParams.get(
+          Utils.HiveAuthenticationParams.SSL_TRUST_STORE_PASSWORD);
+      KeyStore sslTrustStore = 
KeyStore.getInstance(Utils.HiveAuthenticationParams.SSL_TRUST_STORE_TYPE);
+
+      if (trustStorePath == null || trustStorePath.isEmpty()) {
+        throw new 
IllegalArgumentException(Utils.HiveAuthenticationParams.SSL_TRUST_STORE
+            + " Not configured for 2 way SSL connection");
+      }
+      try (FileInputStream fis = new FileInputStream(trustStorePath)) {
+        sslTrustStore.load(fis, trustStorePassword.toCharArray());
+      }
+      trustManagerFactory.init(sslTrustStore);
+      SSLContext context = SSLContext.getInstance("TLS");
+      context.init(keyManagerFactory.getKeyManagers(),
+          trustManagerFactory.getTrustManagers(), new SecureRandom());
+      socketFactory = new SSLSocketFactory(context);
+    } catch (Exception e) {
+      throw new SQLException("Error while initializing 2 way ssl socket 
factory ", e);
+    }
+    return socketFactory;
+  }
+
   private boolean isSslConnection() {
     return 
"true".equalsIgnoreCase(authParams.get(Utils.HiveAuthenticationParams.USE_SSL));
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5475296/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
 
b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
index d0f9c8b..82ac1f5 100644
--- 
a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
+++ 
b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/ConnectionFactory.java
@@ -75,7 +75,14 @@ public class ConnectionFactory implements 
UserLocalFactory<Connection> {
   }
 
   private String getHivePort() {
-    return context.getProperties().get("hive.port");
+    Boolean isHttpMode = 
context.getProperties().get("hive.transport.mode").equalsIgnoreCase("http");
+    String port;
+    if(isHttpMode){
+      port = context.getProperties().get("hive.http.port");
+    }else{
+      port = context.getProperties().get("hive.port");
+    }
+    return  port;
   }
 
   private Map<String, String> getHiveAuthParams() {
@@ -92,6 +99,8 @@ public class ConnectionFactory implements 
UserLocalFactory<Connection> {
       }
       params.put(keyvalue[0], keyvalue[1]);
     }
+    
params.put(Utils.HiveAuthenticationParams.TRANSPORT_MODE,context.getProperties().get("hive.transport.mode"));
+    
params.put(Utils.HiveAuthenticationParams.HTTP_PATH,context.getProperties().get("hive.http.path"));
     return params;
   }
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5475296/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpBasicAuthInterceptor.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpBasicAuthInterceptor.java
 
b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpBasicAuthInterceptor.java
new file mode 100644
index 0000000..dea8fcb
--- /dev/null
+++ 
b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpBasicAuthInterceptor.java
@@ -0,0 +1,55 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import org.apache.http.Header;
+import org.apache.http.HttpRequest;
+import org.apache.http.auth.UsernamePasswordCredentials;
+import org.apache.http.client.CookieStore;
+import org.apache.http.impl.auth.AuthSchemeBase;
+import org.apache.http.impl.auth.BasicScheme;
+import org.apache.http.protocol.HttpContext;
+
+import java.util.Map;
+
+/**
+ * The class is instantiated with the username and password, it is then
+ * used to add header with these credentials to HTTP requests
+ *
+ */
+public class HttpBasicAuthInterceptor extends HttpRequestInterceptorBase {
+  UsernamePasswordCredentials credentials;
+  AuthSchemeBase authScheme;
+
+  public HttpBasicAuthInterceptor(String username, String password, 
CookieStore cookieStore,
+                           String cn, boolean isSSL, Map<String, String> 
additionalHeaders) {
+    super(cookieStore, cn, isSSL, additionalHeaders);
+    this.authScheme = new BasicScheme();
+    if (username != null){
+      this.credentials = new UsernamePasswordCredentials(username, password);
+    }
+  }
+
+  @Override
+  protected void addHttpAuthHeader(HttpRequest httpRequest, HttpContext 
httpContext)
+    throws Exception {
+    Header basicAuthHeader = authScheme.authenticate(credentials, httpRequest, 
httpContext);
+    httpRequest.addHeader(basicAuthHeader);
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5475296/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpKerberosRequestInterceptor.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpKerberosRequestInterceptor.java
 
b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpKerberosRequestInterceptor.java
new file mode 100644
index 0000000..786c94d
--- /dev/null
+++ 
b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpKerberosRequestInterceptor.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import org.apache.hive.service.auth.HttpAuthUtils;
+import org.apache.http.HttpException;
+import org.apache.http.HttpRequest;
+import org.apache.http.client.CookieStore;
+import org.apache.http.protocol.HttpContext;
+
+import java.util.Map;
+import java.util.concurrent.locks.ReentrantLock;
+
+/**
+ * Authentication interceptor which adds Base64 encoded payload,
+ * containing the username and kerberos service ticket,
+ * to the outgoing http request header.
+ */
+public class HttpKerberosRequestInterceptor extends HttpRequestInterceptorBase 
{
+
+  // A fair reentrant lock
+  private static ReentrantLock kerberosLock = new ReentrantLock(true);
+  String principal;
+  String host;
+  String serverHttpUrl;
+  boolean assumeSubject;
+
+  public HttpKerberosRequestInterceptor(String principal, String host,
+      String serverHttpUrl, boolean assumeSubject, CookieStore cs, String cn,
+      boolean isSSL, Map<String, String> additionalHeaders) {
+    super(cs, cn, isSSL, additionalHeaders);
+    this.principal = principal;
+    this.host = host;
+    this.serverHttpUrl = serverHttpUrl;
+    this.assumeSubject = assumeSubject;
+  }
+
+  @Override
+  protected void addHttpAuthHeader(HttpRequest httpRequest,
+    HttpContext httpContext) throws Exception {
+       try {
+      // Generate the service ticket for sending to the server.
+      // Locking ensures the tokens are unique in case of concurrent requests
+      kerberosLock.lock();
+      String kerberosAuthHeader = HttpAuthUtils.getKerberosServiceTicket(
+              principal, host, serverHttpUrl, assumeSubject);
+      // Set the session key token (Base64 encoded) in the headers
+      httpRequest.addHeader(HttpAuthUtils.AUTHORIZATION + ": " +
+        HttpAuthUtils.NEGOTIATE + " ", kerberosAuthHeader);
+    } catch (Exception e) {
+      throw new HttpException(e.getMessage(), e);
+    } finally {
+      kerberosLock.unlock();
+    }
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5475296/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpRequestInterceptorBase.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpRequestInterceptorBase.java
 
b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpRequestInterceptorBase.java
new file mode 100644
index 0000000..7dc3c53
--- /dev/null
+++ 
b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/HttpRequestInterceptorBase.java
@@ -0,0 +1,88 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.hive.client;
+
+import org.apache.http.HttpException;
+import org.apache.http.HttpRequest;
+import org.apache.http.HttpRequestInterceptor;
+import org.apache.http.client.CookieStore;
+import org.apache.http.client.protocol.ClientContext;
+import org.apache.http.protocol.HttpContext;
+
+import java.io.IOException;
+import java.util.Map;
+
+public abstract class HttpRequestInterceptorBase implements 
HttpRequestInterceptor {
+  CookieStore cookieStore;
+  boolean isCookieEnabled;
+  String cookieName;
+  boolean isSSL;
+  Map<String, String> additionalHeaders;
+
+  public HttpRequestInterceptorBase(CookieStore cs, String cn, boolean isSSL,
+                                    Map<String, String> additionalHeaders) {
+    this.cookieStore = cs;
+    this.isCookieEnabled = (cs != null);
+    this.cookieName = cn;
+    this.isSSL = isSSL;
+    this.additionalHeaders = additionalHeaders;
+  }
+
+  // Abstract function to add HttpAuth Header
+  protected abstract void addHttpAuthHeader(HttpRequest httpRequest, 
HttpContext httpContext)
+    throws Exception;
+
+  @Override
+  public void process(HttpRequest httpRequest, HttpContext httpContext)
+    throws HttpException, IOException {
+    try {
+      // If cookie based authentication is allowed, generate ticket only when 
necessary.
+      // The necessary condition is either when there are no server side 
cookies in the
+      // cookiestore which can be send back or when the server returns a 401 
error code
+      // indicating that the previous cookie has expired.
+      if (isCookieEnabled) {
+        httpContext.setAttribute(ClientContext.COOKIE_STORE, cookieStore);
+      }
+      // Generate the kerberos ticket under the following scenarios:
+      // 1. Cookie Authentication is disabled OR
+      // 2. The first time when the request is sent OR
+      // 3. The server returns a 401, which sometimes means the cookie has 
expired
+      // 4. The cookie is secured where as the client connect does not use SSL
+      if (!isCookieEnabled || 
((httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) == null &&
+          (cookieStore == null || (cookieStore != null &&
+          Utils.needToSendCredentials(cookieStore, cookieName, isSSL)))) ||
+          (httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY) != null &&
+          httpContext.getAttribute(Utils.HIVE_SERVER2_RETRY_KEY).
+          equals(Utils.HIVE_SERVER2_RETRY_TRUE)))) {
+        addHttpAuthHeader(httpRequest, httpContext);
+      }
+      if (isCookieEnabled) {
+        httpContext.setAttribute(Utils.HIVE_SERVER2_RETRY_KEY, 
Utils.HIVE_SERVER2_RETRY_FALSE);
+      }
+      // Insert the additional http headers
+      if (additionalHeaders != null) {
+        for (Map.Entry<String, String> entry : additionalHeaders.entrySet()) {
+          httpRequest.addHeader(entry.getKey(), entry.getValue());
+        }
+      }
+    } catch (Exception e) {
+      throw new HttpException(e.getMessage(), e);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5475296/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Utils.java
----------------------------------------------------------------------
diff --git 
a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Utils.java
 
b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Utils.java
index e0dd438..cada587 100644
--- 
a/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Utils.java
+++ 
b/contrib/views/hive/src/main/java/org/apache/ambari/view/hive/client/Utils.java
@@ -20,8 +20,17 @@ package org.apache.ambari.view.hive.client;
 
 import org.apache.hive.service.cli.thrift.TStatus;
 import org.apache.hive.service.cli.thrift.TStatusCode;
+import org.apache.http.client.CookieStore;
+import org.apache.http.cookie.Cookie;
+
+import java.util.List;
 
 public class Utils {
+  // This value is set to true by the setServiceUnavailableRetryStrategy() 
when the server returns 401
+  static final String HIVE_SERVER2_RETRY_KEY = "hive.server2.retryserver";
+  static final String HIVE_SERVER2_RETRY_TRUE = "true";
+  static final String HIVE_SERVER2_RETRY_FALSE = "false";
+
   static void verifySuccess(TStatus status, String comment) throws 
HiveClientException {
     if (status.getStatusCode() != TStatusCode.SUCCESS_STATUS &&
         status.getStatusCode() != TStatusCode.SUCCESS_WITH_INFO_STATUS) {
@@ -30,6 +39,27 @@ public class Utils {
     }
   }
 
+  static boolean needToSendCredentials(CookieStore cookieStore, String cookieName, boolean isSSL) {
+    if (cookieName == null || cookieStore == null) {
+      return true;
+    }
+
+    List<Cookie> cookies = cookieStore.getCookies();
+
+    for (Cookie c : cookies) {
+      // If this is a secured cookie and the current connection is non-secured,
+      // then, skip this cookie. We need to skip this cookie because, the cookie
+      // replay will not be transmitted to the server.
+      if (c.isSecure() && !isSSL) {
+        continue;
+      }
+      if (c.getName().equals(cookieName)) {
+        return false;
+      }
+    }
+    return true;
+  }
+
   public static class HiveAuthenticationParams {
     public static final String AUTH_TYPE = "auth";
     // We're deprecating this variable's name.
@@ -64,9 +94,24 @@ public class Utils {
     // Default namespace value on ZooKeeper.
     // This value is used if the param "zooKeeperNamespace" is not specified in the JDBC Uri.
     public static final String ZOOKEEPER_DEFAULT_NAMESPACE = "hiveserver2";
-
     // Non-configurable params:
     // Currently supports JKS keystore format
     public static final String SSL_TRUST_STORE_TYPE = "JKS";
+    static final String COOKIE_AUTH = "cookieAuth";
+    static final String COOKIE_AUTH_FALSE = "false";
+    static final String COOKIE_NAME = "cookieName";
+    // The default value of the cookie name when CookieAuth=true
+    static final String DEFAULT_COOKIE_NAMES_HS2 = "hive.server2.auth";
+    static final String HTTP_HEADER_PREFIX = "http.header.";
+    // --------------- Begin 2 way ssl options -------------------------
+    // Use two way ssl. This param will take effect only when ssl=true
+    static final String USE_TWO_WAY_SSL = "twoWay";
+    static final String TRUE = "true";
+    static final String SSL_KEY_STORE = "sslKeyStore";
+    static final String SSL_KEY_STORE_PASSWORD = "keyStorePassword";
+    static final String SSL_KEY_STORE_TYPE = "JKS";
+    static final String SUNX509_ALGORITHM_STRING = "SunX509";
+    // --------------- End 2 way ssl options ----------------------------
+    static final String SUNJSSE_ALGORITHM_STRING = "SunJSSE";
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5475296/contrib/views/hive/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/main/resources/view.xml b/contrib/views/hive/src/main/resources/view.xml
index ce0895b..710d077 100644
--- a/contrib/views/hive/src/main/resources/view.xml
+++ b/contrib/views/hive/src/main/resources/view.xml
@@ -45,6 +45,36 @@
     </parameter>
 
     <parameter>
+        <name>hive.http.port</name>
+        <description>HiveServer2 Http port (example: 10001).</description>
+        <label>HiveServer2 Http port</label>
+        <placeholder>10001</placeholder>
+        <default-value>10001</default-value>
+        <cluster-config>hive-site/hive.server2.thrift.http.port</cluster-config>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>hive.http.path</name>
+        <description>HiveServer2 Http path (example: cliservice).</description>
+        <label>HiveServer2 Http path</label>
+        <placeholder>cliservice</placeholder>
+        <default-value>cliservice</default-value>
+        <cluster-config>hive-site/hive.server2.thrift.http.path</cluster-config>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
+        <name>hive.transport.mode</name>
+        <description>HiveServer2 Transport Mode (example: http/binary).</description>
+        <label>HiveServer2 Transport Mode</label>
+        <placeholder>binary</placeholder>
+        <default-value>binary</default-value>
+        <cluster-config>hive-site/hive.server2.transport.mode</cluster-config>
+        <required>true</required>
+    </parameter>
+
+    <parameter>
       <name>hive.auth</name>
       <description>Semicolon-separated authentication configs.</description>
       <label>Hive Authentication</label>

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5475296/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/ConnectionTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/ConnectionTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/ConnectionTest.java
index ba37dc7..bd8dbf9 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/ConnectionTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/client/ConnectionTest.java
@@ -37,7 +37,7 @@ public class ConnectionTest {
 
     thrown.expect(HiveClientException.class);
     thrown.expectMessage("Connection refused");
-    new Connection("nonexistent.host.com", 10000, auth, "ambari-qa", null);
+    new Connection("127.0.0.1", 42420, auth, "ambari-qa", null);
   }
 
   @Test
@@ -47,7 +47,7 @@ public class ConnectionTest {
     auth.put("password", "${ask_password}");
 
     thrown.expect(HiveAuthRequiredException.class);
-    new Connection("nonexistent.host.com", 10000, auth, "ambari-qa", null);
+    new Connection("127.0.0.1", 42420, auth, "ambari-qa", null);
   }
 
   @Test
@@ -58,6 +58,6 @@ public class ConnectionTest {
 
     thrown.expect(HiveClientException.class);
     thrown.expectMessage("Connection refused");
-    new Connection("nonexistent.host.com", 10000, auth, "ambari-qa", 
"password");
+    new Connection("127.0.0.1", 42420, auth, "ambari-qa", "password");
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/f5475296/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobLDAPServiceTest.java
----------------------------------------------------------------------
diff --git a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobLDAPServiceTest.java b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobLDAPServiceTest.java
index 7be8bc9..dfdcb34 100644
--- a/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobLDAPServiceTest.java
+++ b/contrib/views/hive/src/test/java/org/apache/ambari/view/hive/resources/jobs/JobLDAPServiceTest.java
@@ -64,8 +64,9 @@ public class JobLDAPServiceTest extends BaseHiveTest {
   @Override
   protected void setupProperties(Map<String, String> properties, File baseDir) throws Exception {
     super.setupProperties(properties, baseDir);
-    properties.put("hive.host", "nonexistent.host.com");
-    properties.put("hive.port", "10000");
+    properties.put("hive.transport.mode", "binary");
+    properties.put("hive.host", "127.0.0.1");
+    properties.put("hive.port", "42420");
 
     properties.put("scripts.dir", "/tmp/.hiveQueries");
     properties.put("jobs.dir", "/tmp/.hiveJobs");

Reply via email to