Repository: hive
Updated Branches:
  refs/heads/hive-14535 81853c12e -> 42a38577b


HIVE-17318: Make Hikari CP configurable using hive properties in hive-site.xml (Barna Zsombor Klara, via Peter Vary)

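For illustration, a minimal hive-site.xml sketch of the kind of configuration this change enables. The pooling-type key is assumed here to be datanucleus.connectionPoolingType (the property behind METASTORE_CONNECTION_POOLING_TYPE); the hikari.* keys shown are the ones exercised by the patch and its tests, and are handed to HikariConfig with the "hikari." prefix stripped. These entries go inside the <configuration> element of hive-site.xml:

  <!-- select the HikariCP-backed DataSourceProvider for the metastore -->
  <property>
    <name>datanucleus.connectionPoolingType</name>
    <value>hikari</value>
  </property>
  <!-- read by the provider and applied via setConnectionTimeout; defaults to 30000 ms -->
  <property>
    <name>hikari.connectionTimeout</name>
    <value>30000</value>
  </property>
  <!-- forwarded to HikariConfig as idleTimeout (ms), as in the new tests -->
  <property>
    <name>hikari.idleTimeout</name>
    <value>59999</value>
  </property>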

Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6b103828
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6b103828
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6b103828

Branch: refs/heads/hive-14535
Commit: 6b103828863dd00a8745311d50c26a1db09fae34
Parents: 835c066
Author: Peter Vary <pv...@cloudera.com>
Authored: Fri Aug 25 16:01:10 2017 +0200
Committer: Peter Vary <pv...@cloudera.com>
Committed: Fri Aug 25 16:01:10 2017 +0200

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  3 +-
 .../datasource/DataSourceProviderFactory.java   |  2 +-
 .../datasource/HikariCPDataSourceProvider.java  | 97 ++++++++++++++++++++
 .../hadoop/hive/metastore/txn/TxnHandler.java   | 14 +--
 .../TestDataSourceProviderFactory.java          | 60 ++++++++++++
 5 files changed, 162 insertions(+), 14 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/6b103828/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 5a2a436..0d8d7ae 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -3464,7 +3464,8 @@ public class HiveConf extends Configuration {
             "hive.spark.client.secret.bits," +
             "hive.spark.client.rpc.server.address," +
             "hive.spark.client.rpc.server.port," +
-            "bonecp.",
+            "bonecp.,"+
+            "hikari.",
         "Comma separated list of configuration options which are immutable at 
runtime"),
     HIVE_CONF_HIDDEN_LIST("hive.conf.hidden.list",
         METASTOREPWD.varname + "," + HIVE_SERVER2_SSL_KEYSTORE_PASSWORD.varname

http://git-wip-us.apache.org/repos/asf/hive/blob/6b103828/metastore/src/java/org/apache/hadoop/hive/metastore/datasource/DataSourceProviderFactory.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/datasource/DataSourceProviderFactory.java b/metastore/src/java/org/apache/hadoop/hive/metastore/datasource/DataSourceProviderFactory.java
index fa6bb1c..1eb792c 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/datasource/DataSourceProviderFactory.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/datasource/DataSourceProviderFactory.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.conf.Configuration;
 public abstract  class DataSourceProviderFactory {
 
   private static final ImmutableList<DataSourceProvider> FACTORIES =
-      ImmutableList.<DataSourceProvider>builder().add(new BoneCPDataSourceProvider()).build();
+      ImmutableList.<DataSourceProvider>builder().add(new HikariCPDataSourceProvider(), new BoneCPDataSourceProvider()).build();
 
   /**
    * @param hdpConfig hadoop configuration

http://git-wip-us.apache.org/repos/asf/hive/blob/6b103828/metastore/src/java/org/apache/hadoop/hive/metastore/datasource/HikariCPDataSourceProvider.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/datasource/HikariCPDataSourceProvider.java b/metastore/src/java/org/apache/hadoop/hive/metastore/datasource/HikariCPDataSourceProvider.java
new file mode 100644
index 0000000..9b3d6d5
--- /dev/null
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/datasource/HikariCPDataSourceProvider.java
@@ -0,0 +1,97 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.metastore.datasource;
+
+import com.zaxxer.hikari.HikariConfig;
+import com.zaxxer.hikari.HikariDataSource;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.sql.DataSource;
+import java.sql.SQLException;
+import java.util.Properties;
+
+/**
+ * DataSourceProvider for the HikariCP connection pool.
+ */
+public class HikariCPDataSourceProvider implements DataSourceProvider {
+
+  private static final Logger LOG = LoggerFactory.getLogger(HikariCPDataSourceProvider.class);
+
+  public static final String HIKARI = "hikari";
+  private static final String CONNECTION_TIMEOUT_PROPERTY= "hikari.connectionTimeout";
+
+  @Override
+  public DataSource create(Configuration hdpConfig) throws SQLException {
+
+    LOG.debug("Creating Hikari connection pool for the MetaStore");
+
+    String driverUrl = DataSourceProvider.getMetastoreJdbcDriverUrl(hdpConfig);
+    String user = DataSourceProvider.getMetastoreJdbcUser(hdpConfig);
+    String passwd = DataSourceProvider.getMetastoreJdbcPasswd(hdpConfig);
+    int maxPoolSize = hdpConfig.getInt(
+        MetastoreConf.ConfVars.CONNECTION_POOLING_MAX_CONNECTIONS.varname,
+        ((Long)MetastoreConf.ConfVars.CONNECTION_POOLING_MAX_CONNECTIONS.defaultVal).intValue());
+
+    Properties properties = replacePrefix(
+        DataSourceProvider.getPrefixedProperties(hdpConfig, HIKARI));
+    long connectionTimeout = hdpConfig.getLong(CONNECTION_TIMEOUT_PROPERTY, 30000L);
+    HikariConfig config = null;
+    try {
+      config = new HikariConfig(properties);
+    } catch (Exception e) {
+      throw new SQLException("Cannot create HikariCP configuration: ", e);
+    }
+    config.setMaximumPoolSize(maxPoolSize);
+    config.setJdbcUrl(driverUrl);
+    config.setUsername(user);
+    config.setPassword(passwd);
+    //https://github.com/brettwooldridge/HikariCP
+    config.setConnectionTimeout(connectionTimeout);
+    return new HikariDataSource(config);
+  }
+
+  @Override
+  public boolean mayReturnClosedConnection() {
+    // Only BoneCP should return true
+    return false;
+  }
+
+  @Override
+  public boolean supports(Configuration configuration) {
+    String poolingType =
+        configuration.get(
+            MetastoreConf.ConfVars.CONNECTION_POOLING_TYPE.varname).toLowerCase();
+    if (HIKARI.equals(poolingType)) {
+      int hikariPropsNr = DataSourceProvider.getPrefixedProperties(configuration, HIKARI).size();
+      LOG.debug("Found " + hikariPropsNr + " nr. of hikari specific configurations");
+      return hikariPropsNr > 0;
+    }
+    LOG.debug("Configuration requested " + poolingType + " pooling, 
HikariCpDSProvider exiting");
+    return false;
+  }
+
+  private Properties replacePrefix(Properties props) {
+    Properties newProps = new Properties();
+    props.forEach((key,value) ->
+        newProps.put(key.toString().replaceFirst(HIKARI + ".", ""), value));
+    return newProps;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/6b103828/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
index a6d5613..71e7c0c 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/txn/TxnHandler.java
@@ -18,8 +18,6 @@
 package org.apache.hadoop.hive.metastore.txn;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.zaxxer.hikari.HikariConfig;
-import com.zaxxer.hikari.HikariDataSource;
 
 import org.apache.commons.dbcp.ConnectionFactory;
 import org.apache.commons.dbcp.DriverManagerConnectionFactory;
@@ -32,9 +30,9 @@ import org.apache.hadoop.hive.common.classification.RetrySemantics;
 import org.apache.hadoop.hive.metastore.DatabaseProduct;
 import org.apache.hadoop.hive.metastore.HouseKeeperService;
 import org.apache.hadoop.hive.metastore.Warehouse;
-import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
 import org.apache.hadoop.hive.metastore.datasource.BoneCPDataSourceProvider;
 import org.apache.hadoop.hive.metastore.datasource.DataSourceProvider;
+import org.apache.hadoop.hive.metastore.datasource.HikariCPDataSourceProvider;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.commons.dbcp.PoolingDataSource;
@@ -3187,15 +3185,7 @@ abstract class TxnHandler implements TxnStore, TxnStore.MutexAPI {
           new PoolableConnectionFactory(connFactory, objectPool, null, null, false, true);
       return new PoolingDataSource(objectPool);
     } else if ("hikaricp".equals(connectionPooler)) {
-      HikariConfig config = new HikariConfig();
-      config.setMaximumPoolSize(maxPoolSize);
-      config.setJdbcUrl(driverUrl);
-      config.setUsername(user);
-      config.setPassword(passwd);
-      //https://github.com/brettwooldridge/HikariCP
-      config.setConnectionTimeout(getConnectionTimeoutMs);
-
-      return new HikariDataSource(config);
+      return new HikariCPDataSourceProvider().create(conf);
     } else if ("none".equals(connectionPooler)) {
       LOG.info("Choosing not to pool JDBC connections");
       return new NoPoolConnectionPool(conf);

http://git-wip-us.apache.org/repos/asf/hive/blob/6b103828/metastore/src/test/org/apache/hadoop/hive/metastore/datasource/TestDataSourceProviderFactory.java
----------------------------------------------------------------------
diff --git a/metastore/src/test/org/apache/hadoop/hive/metastore/datasource/TestDataSourceProviderFactory.java b/metastore/src/test/org/apache/hadoop/hive/metastore/datasource/TestDataSourceProviderFactory.java
index 6284604..856ed67 100644
--- a/metastore/src/test/org/apache/hadoop/hive/metastore/datasource/TestDataSourceProviderFactory.java
+++ b/metastore/src/test/org/apache/hadoop/hive/metastore/datasource/TestDataSourceProviderFactory.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.hive.metastore.datasource;
 
 import com.jolbox.bonecp.BoneCPDataSource;
+import com.zaxxer.hikari.HikariDataSource;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.junit.Assert;
 import org.junit.Before;
@@ -104,4 +105,63 @@ public class TestDataSourceProviderFactory {
     Assert.assertTrue(ds instanceof BoneCPDataSource);
     Assert.assertEquals(true, ((BoneCPDataSource)ds).isDisableJMX());
   }
+
+  @Test
+  public void testCreateHikariCpDataSource() throws SQLException {
+
+    conf.setVar(HiveConf.ConfVars.METASTORE_CONNECTION_POOLING_TYPE, HikariCPDataSourceProvider.HIKARI);
+    // This is needed to prevent the HikariDataSource from trying to connect to the DB
+    conf.set(HikariCPDataSourceProvider.HIKARI + ".initializationFailTimeout", "-1");
+
+    DataSourceProvider dsp = DataSourceProviderFactory.getDataSourceProvider(conf);
+    Assert.assertNotNull(dsp);
+
+    DataSource ds = dsp.create(conf);
+    Assert.assertTrue(ds instanceof HikariDataSource);
+  }
+
+  @Test
+  public void testSetHikariCpStringProperty() throws SQLException {
+
+    conf.setVar(HiveConf.ConfVars.METASTORE_CONNECTION_POOLING_TYPE, HikariCPDataSourceProvider.HIKARI);
+    conf.set(HikariCPDataSourceProvider.HIKARI + ".connectionInitSql", "select 1 from dual");
+    conf.set(HikariCPDataSourceProvider.HIKARI + ".initializationFailTimeout", "-1");
+
+    DataSourceProvider dsp = DataSourceProviderFactory.getDataSourceProvider(conf);
+    Assert.assertNotNull(dsp);
+
+    DataSource ds = dsp.create(conf);
+    Assert.assertTrue(ds instanceof HikariDataSource);
+    Assert.assertEquals("select 1 from dual", 
((HikariDataSource)ds).getConnectionInitSql());
+  }
+
+  @Test
+  public void testSetHikariCpNumberProperty() throws SQLException {
+
+    conf.setVar(HiveConf.ConfVars.METASTORE_CONNECTION_POOLING_TYPE, HikariCPDataSourceProvider.HIKARI);
+    conf.set(HikariCPDataSourceProvider.HIKARI + ".idleTimeout", "59999");
+    conf.set(HikariCPDataSourceProvider.HIKARI + ".initializationFailTimeout", "-1");
+
+    DataSourceProvider dsp = DataSourceProviderFactory.getDataSourceProvider(conf);
+    Assert.assertNotNull(dsp);
+
+    DataSource ds = dsp.create(conf);
+    Assert.assertTrue(ds instanceof HikariDataSource);
+    Assert.assertEquals(59999L, ((HikariDataSource)ds).getIdleTimeout());
+  }
+
+  @Test
+  public void testSetHikariCpBooleanProperty() throws SQLException {
+
+    conf.setVar(HiveConf.ConfVars.METASTORE_CONNECTION_POOLING_TYPE, HikariCPDataSourceProvider.HIKARI);
+    conf.set(HikariCPDataSourceProvider.HIKARI + ".allowPoolSuspension", "false");
+    conf.set(HikariCPDataSourceProvider.HIKARI + ".initializationFailTimeout", "-1");
+
+    DataSourceProvider dsp = DataSourceProviderFactory.getDataSourceProvider(conf);
+    Assert.assertNotNull(dsp);
+
+    DataSource ds = dsp.create(conf);
+    Assert.assertTrue(ds instanceof HikariDataSource);
+    Assert.assertEquals(false, ((HikariDataSource)ds).isAllowPoolSuspension());
+  }
 }
