This is an automated email from the ASF dual-hosted git repository. jcamacho pushed a commit to branch master in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push: new 0c040fb HIVE-24092: Implement jdbc methods invoked by Calcite (Kishen Das, reviewed by Jesus Camacho Rodriguez) 0c040fb is described below commit 0c040fb786c0deadfad3259cb2c9230938228d8f Author: kishendas <kishen....@gmail.com> AuthorDate: Thu Sep 10 08:40:41 2020 -0700 HIVE-24092: Implement jdbc methods invoked by Calcite (Kishen Das, reviewed by Jesus Camacho Rodriguez) Closes apache/hive#1443 --- .../apache/hive/jdbc/TestHiveDatabaseMetaData.java | 110 +++++++++++++++++++++ .../TestThriftCliServiceWithInfoMessage.java | 7 ++ .../java/org/apache/hive/jdbc/HiveConnection.java | 38 +++++++ .../org/apache/hive/jdbc/HiveDatabaseMetaData.java | 47 ++++++--- jdbc/src/java/org/apache/hive/jdbc/Utils.java | 8 +- .../hive/service/cli/thrift/ThriftCLIService.java | 9 +- 6 files changed, 202 insertions(+), 17 deletions(-) diff --git a/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestHiveDatabaseMetaData.java b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestHiveDatabaseMetaData.java new file mode 100644 index 0000000..2e5128e --- /dev/null +++ b/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestHiveDatabaseMetaData.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hive.jdbc; + +import org.apache.hive.jdbc.HiveConnection; +import org.apache.hive.jdbc.Utils; +import org.apache.hive.jdbc.Utils.JdbcConnectionParams; + +import java.util.LinkedHashMap; +import java.util.Properties; +import java.util.Map; +import java.util.HashMap; +import java.sql.SQLException; + +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.*; + +/** + * TestHiveDatabaseMetaData. + * + */ +public class TestHiveDatabaseMetaData { + + private Map<String, String> map = new LinkedHashMap<String, String>(); + private JdbcConnectionParams jdbcConnectionParams = new JdbcConnectionParams(); + private HiveDatabaseMetaData hiveDatabaseMetaData; + private HiveConnection connection = new HiveConnection(); + + @Before + public void setup() throws Exception { + jdbcConnectionParams.setHiveConfs(map); + connection.setConnParams(jdbcConnectionParams); + hiveDatabaseMetaData = new HiveDatabaseMetaData(connection, null, null); + } + + @Test + public void testGetHiveDefaultNullsLastNullConfig() { + map.remove(Utils.JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY); + try { + hiveDatabaseMetaData.nullsAreSortedLow(); + fail("SQLException is expected"); + } catch (Exception e) { + assertTrue(e.getMessage().contains("HIVE_DEFAULT_NULLS_LAST is not available")); + } + } + + @Test + public void testGetHiveDefaultNullsLast() throws SQLException { + map.put(Utils.JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY, "true"); + assertTrue(hiveDatabaseMetaData.getHiveDefaultNullsLast(map)); + + map.put(Utils.JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY, "false"); + assertFalse(hiveDatabaseMetaData.getHiveDefaultNullsLast(map)); + + } + + @Test + public void testNullsAreSortedHigh() throws SQLException { + map.put(Utils.JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY, "false"); + 
+ assertFalse(hiveDatabaseMetaData.nullsAreSortedHigh()); + map.put(Utils.JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY, "true"); + assertTrue(hiveDatabaseMetaData.nullsAreSortedHigh()); + } + + @Test + public void testNullsAreSortedLow() throws SQLException { + map.put(Utils.JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY, "false"); + assertTrue(hiveDatabaseMetaData.nullsAreSortedLow()); + map.put(Utils.JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY, "true"); + assertFalse(hiveDatabaseMetaData.nullsAreSortedLow()); + } + + @Test + public void testHiveConnectionUpdateServerHiveConf() { + Map<String, String> serverHiveConf = new HashMap<>(); + serverHiveConf.put("hive.server2.thrift.resultset.default.fetch.size", Integer.toString(87)); + serverHiveConf.put("hive.default.nulls.last", "false"); + + jdbcConnectionParams.getHiveConfs().put(Utils.JdbcConnectionParams.HIVE_CONF_PREFIX + + "hive.server2.thrift.resultset.default.fetch.size", "1534"); + connection.updateServerHiveConf(serverHiveConf, jdbcConnectionParams); + + // Client configuration should not be overridden by the server configuration. + assertEquals("1534", jdbcConnectionParams.getHiveConfs().get(Utils.JdbcConnectionParams.HIVE_CONF_PREFIX + + "hive.server2.thrift.resultset.default.fetch.size")); + + // Server configuration should be updated, since it's not provided by the client. 
+ assertEquals("false", jdbcConnectionParams.getHiveConfs() + .get(Utils.JdbcConnectionParams.HIVE_CONF_PREFIX + "hive.default.nulls.last")); + + } +} \ No newline at end of file diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceWithInfoMessage.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceWithInfoMessage.java index ca4fbc6..cd114a1 100644 --- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceWithInfoMessage.java +++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftCliServiceWithInfoMessage.java @@ -72,6 +72,8 @@ public class TestThriftCliServiceWithInfoMessage { hiveConf.setVar(ConfVars.HIVE_SERVER2_AUTHENTICATION, HiveAuthConstants.AuthTypes.NOSASL.toString()); hiveConf.setVar(ConfVars.HIVE_SERVER2_TRANSPORT_MODE, "binary"); hiveConf.setIntVar(ConfVars.HIVE_SERVER2_WEBUI_PORT, webuiPort); + hiveConf.setBoolVar(ConfVars.HIVE_DEFAULT_NULLS_LAST, true); + // Enable showing operation drilldown link hiveConf.setBoolVar(ConfVars.HIVE_SERVER2_SHOW_OPERATION_DRILLDOWN_LINK, true); hiveServer2 = new HiveServer2(); @@ -95,6 +97,11 @@ public class TestThriftCliServiceWithInfoMessage { TOpenSessionReq openReq = new TOpenSessionReq(); openReq.setClient_protocol(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V11); TOpenSessionResp sessionResp = client.OpenSession(openReq); + + Map<String, String> serverHiveConf = sessionResp.getConfiguration(); + assertNotNull(serverHiveConf); + assertTrue(Boolean.parseBoolean(serverHiveConf.get(ConfVars.HIVE_DEFAULT_NULLS_LAST.varname))); + TSessionHandle sessHandle = sessionResp.getSessionHandle(); TExecuteStatementReq execReq = new TExecuteStatementReq(sessHandle, "select 1"); execReq.setRunAsync(true); diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java index fd0f569..e473521 100644 --- 
a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java @@ -18,6 +18,7 @@ package org.apache.hive.jdbc; +import com.google.common.annotations.VisibleForTesting; import org.apache.hadoop.io.Text; import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.UserGroupInformation; @@ -121,6 +122,7 @@ import java.util.Properties; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.ReentrantLock; +import java.util.stream.Stream; /** * HiveConnection. @@ -242,6 +244,13 @@ public class HiveConnection implements java.sql.Connection { return url.toString(); } + @VisibleForTesting + public HiveConnection() { + sessConfMap = null; + isEmbeddedMode = true; + initFetchSize = 0; + } + public HiveConnection(String uri, Properties info) throws SQLException { setupLoginTimeout(); try { @@ -866,6 +875,13 @@ public class HiveConnection implements java.sql.Connection { try { TOpenSessionResp openResp = client.OpenSession(openReq); + // Populate a given configuration from HS2 server HiveConf, only if that configuration + // is not already present in Connection parameter HiveConf i.e., client side configuration + // takes precedence over the server side configuration. + Map<String, String> serverHiveConf = openResp.getConfiguration(); + + updateServerHiveConf(serverHiveConf, connParams); + // validate connection Utils.verifySuccess(openResp.getStatus()); if (!supportedProtocols.contains(openResp.getServerProtocolVersion())) { @@ -891,6 +907,20 @@ public class HiveConnection implements java.sql.Connection { isClosed = false; } + @VisibleForTesting + public void updateServerHiveConf(Map<String, String> serverHiveConf, JdbcConnectionParams connParams) { + if (serverHiveConf != null) { + // Iterate over all Server configurations. 
+ Stream.of(ConfVars.values()).forEach(conf -> { + String key = JdbcConnectionParams.HIVE_CONF_PREFIX + conf.varname; + // Update Server HiveConf, only if a given configuration is not already set from the client. + if (serverHiveConf.containsKey(conf.varname) && !connParams.getHiveConfs().containsKey(key)) { + connParams.getHiveConfs().put(key, serverHiveConf.get(conf.varname)); + } + }); + } + } + /** * @return username from sessConfMap */ @@ -1714,6 +1744,14 @@ public class HiveConnection implements java.sql.Connection { return protocol; } + public JdbcConnectionParams getConnParams() { + return connParams; + } + + public JdbcConnectionParams setConnParams(JdbcConnectionParams jdbcConnectionParams) { + return connParams = jdbcConnectionParams; + } + public static TCLIService.Iface newSynchronizedClient( TCLIService.Iface client) { return (TCLIService.Iface) Proxy.newProxyInstance( diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java b/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java index b081edb..9998646 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java +++ b/jdbc/src/java/org/apache/hive/jdbc/HiveDatabaseMetaData.java @@ -21,7 +21,11 @@ package org.apache.hive.jdbc; import java.util.ArrayList; import java.util.List; + +import jline.internal.Log; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hadoop.hive.metastore.api.FieldSchema; +import org.apache.hive.jdbc.Utils.JdbcConnectionParams; import org.apache.hive.service.cli.TableSchema; import java.sql.Connection; import java.sql.DatabaseMetaData; @@ -31,6 +35,7 @@ import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.util.Arrays; import java.util.Comparator; +import java.util.Map; import java.util.jar.Attributes; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hive.service.cli.GetInfoType; @@ -855,19 +860,19 @@ public class HiveDatabaseMetaData implements DatabaseMetaData { } 
public boolean nullsAreSortedAtEnd() throws SQLException { - throw new SQLFeatureNotSupportedException("Method not supported"); + return false; } public boolean nullsAreSortedAtStart() throws SQLException { - throw new SQLFeatureNotSupportedException("Method not supported"); + return false; } public boolean nullsAreSortedHigh() throws SQLException { - throw new SQLFeatureNotSupportedException("Method not supported"); + return getHiveDefaultNullsLast(connection.getConnParams().getHiveConfs()); } public boolean nullsAreSortedLow() throws SQLException { - throw new SQLFeatureNotSupportedException("Method not supported"); + return !getHiveDefaultNullsLast(connection.getConnParams().getHiveConfs()); } public boolean othersDeletesAreVisible(int type) throws SQLException { @@ -895,27 +900,27 @@ public class HiveDatabaseMetaData implements DatabaseMetaData { } public boolean storesLowerCaseIdentifiers() throws SQLException { - throw new SQLFeatureNotSupportedException("Method not supported"); + return true; } public boolean storesLowerCaseQuotedIdentifiers() throws SQLException { - throw new SQLFeatureNotSupportedException("Method not supported"); + return true; } public boolean storesMixedCaseIdentifiers() throws SQLException { - throw new SQLFeatureNotSupportedException("Method not supported"); + return false; } public boolean storesMixedCaseQuotedIdentifiers() throws SQLException { - throw new SQLFeatureNotSupportedException("Method not supported"); + return false; } public boolean storesUpperCaseIdentifiers() throws SQLException { - throw new SQLFeatureNotSupportedException("Method not supported"); + return false; } public boolean storesUpperCaseQuotedIdentifiers() throws SQLException { - throw new SQLFeatureNotSupportedException("Method not supported"); + return false; } public boolean supportsANSI92EntryLevelSQL() throws SQLException { @@ -1040,11 +1045,11 @@ public class HiveDatabaseMetaData implements DatabaseMetaData { } public boolean 
supportsMixedCaseIdentifiers() throws SQLException { - throw new SQLFeatureNotSupportedException("Method not supported"); + return false; } public boolean supportsMixedCaseQuotedIdentifiers() throws SQLException { - throw new SQLFeatureNotSupportedException("Method not supported"); + return false; } public boolean supportsMultipleOpenResults() throws SQLException { @@ -1227,4 +1232,20 @@ public class HiveDatabaseMetaData implements DatabaseMetaData { Utils.verifySuccess(resp.getStatus()); return resp; } -} + + /** + * This returns Hive configuration for HIVE_DEFAULT_NULLS_LAST. + * + * @param hiveConfs + * @return + */ + public static boolean getHiveDefaultNullsLast(Map<String, String> hiveConfs) throws SQLException { + if (hiveConfs == null) { + throw new SQLException("hiveConfs is not available"); + } + if (hiveConfs.get(JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY) == null) { + throw new SQLException("HIVE_DEFAULT_NULLS_LAST is not available"); + } + return Boolean.parseBoolean(hiveConfs.get(JdbcConnectionParams.HIVE_DEFAULT_NULLS_LAST_KEY)); + } +} \ No newline at end of file diff --git a/jdbc/src/java/org/apache/hive/jdbc/Utils.java b/jdbc/src/java/org/apache/hive/jdbc/Utils.java index 90412e4..737a5f7 100644 --- a/jdbc/src/java/org/apache/hive/jdbc/Utils.java +++ b/jdbc/src/java/org/apache/hive/jdbc/Utils.java @@ -31,6 +31,8 @@ import java.util.Properties; import java.util.regex.Matcher; import java.util.regex.Pattern; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; import org.apache.hive.service.cli.HiveSQLException; import org.apache.hive.service.rpc.thrift.TStatus; import org.apache.hive.service.rpc.thrift.TStatusCode; @@ -168,7 +170,7 @@ public class Utils { static final String SSL_TRUST_STORE_TYPE = "JKS"; private static final String HIVE_VAR_PREFIX = "hivevar:"; - private static final String HIVE_CONF_PREFIX = "hiveconf:"; + public static final String HIVE_CONF_PREFIX = "hiveconf:"; private String 
host = null; private int port = 0; private String jdbcUriString; @@ -187,6 +189,10 @@ public class Utils { private String currentHostZnodePath; private final List<String> rejectedHostZnodePaths = new ArrayList<String>(); + // HiveConf parameters + public static final String HIVE_DEFAULT_NULLS_LAST_KEY = + HIVE_CONF_PREFIX + HiveConf.ConfVars.HIVE_DEFAULT_NULLS_LAST.varname; + public JdbcConnectionParams() { } diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java index 66beddf..9255d9f 100644 --- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java +++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java @@ -29,6 +29,7 @@ import java.io.IOException; import java.net.InetAddress; import java.net.UnknownHostException; import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.concurrent.TimeUnit; import javax.security.auth.login.LoginException; @@ -326,10 +327,12 @@ public abstract class ThriftCLIService extends AbstractService implements TCLISe final int fetchSize = hiveConf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE); + Map<String, String> map = new HashMap<>(); + map.put(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE.varname, Integer.toString(fetchSize)); + map.put(HiveConf.ConfVars.HIVE_DEFAULT_NULLS_LAST.varname, + String.valueOf(hiveConf.getBoolVar(ConfVars.HIVE_DEFAULT_NULLS_LAST))); resp.setSessionHandle(sessionHandle.toTSessionHandle()); - resp.setConfiguration(Collections - .singletonMap(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_RESULTSET_DEFAULT_FETCH_SIZE.varname, - Integer.toString(fetchSize))); + resp.setConfiguration(map); resp.setStatus(OK_STATUS); ThriftCLIServerContext context = (ThriftCLIServerContext)currentServerContext.get();