This is an automated email from the ASF dual-hosted git repository.
difin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 79ec7f37fb0 HIVE-28782: Ensure Hive constructs URI's with IPv6
literals safely. (#5758) (Dmitriy Fingerman, reviewed by Ayush Saxena)
79ec7f37fb0 is described below
commit 79ec7f37fb0170acfc00dc630549d8c8090f5028
Author: Dmitriy Fingerman <[email protected]>
AuthorDate: Tue Apr 15 06:58:58 2025 -0400
HIVE-28782: Ensure Hive constructs URI's with IPv6 literals safely. (#5758)
(Dmitriy Fingerman, reviewed by Ayush Saxena)
* HIVE-28782: Ensure Hive constructs URI's with IPv6 literals safely.
---
.../HiveSiteHS2ConnectionFileParser.java | 8 +-
.../org/apache/hive/beeline/ProxyAuthTest.java | 28 +++---
.../hadoop/hive/common/auth/HiveAuthUtils.java | 2 +-
.../hadoop/hive/contrib/serde2/TestRegexSerDe.java | 2 +-
.../hadoop/hive/hbase/HBaseStorageHandler.java | 2 +-
.../hadoop/hive/hbase/TestHBaseStorageHandler.java | 2 +-
.../java/org/apache/hive/hcatalog/MiniCluster.java | 2 +-
.../org/apache/hive/hcatalog/templeton/Main.java | 3 +-
.../templeton/tool/TestTempletonUtils.java | 2 +-
.../org/apache/iceberg/mr/hive/TestHiveShell.java | 2 +-
.../hive/hcatalog/hbase/ManyMiniCluster.java | 2 +-
.../hive/minikdc/TestHs2HooksWithMiniKdc.java | 2 +-
.../metastore/security/TestHadoopAuthBridge23.java | 2 +-
.../org/apache/hadoop/hive/hooks/TestHs2Hooks.java | 2 +-
.../hive/metastore/TestReplChangeManager.java | 2 +-
.../hadoop/hive/ql/TestWarehouseDnsPath.java | 2 +-
.../thrift/TestThriftHttpCLIServiceFeatures.java | 5 +-
.../java/org/apache/hive/jdbc/HiveConnection.java | 3 +-
jdbc/src/java/org/apache/hive/jdbc/Utils.java | 5 +-
.../hive/jdbc/ZooKeeperHiveClientHelper.java | 2 +-
.../hadoop/hive/kafka/KafkaBrokerResource.java | 2 +-
.../helpers/LlapTaskUmbilicalServer.java | 3 +-
.../llap/daemon/services/impl/LlapWebServices.java | 2 +-
.../llap/shufflehandler/TestShuffleHandler.java | 2 +-
.../hive/llap/tezplugins/LlapTaskCommunicator.java | 2 +-
.../org/apache/hadoop/hive/ql/TestTxnCommands.java | 2 +-
.../hive/serde2/TestTCTLSeparatedProtocol.java | 2 +-
.../hive/service/auth/saml/HiveSamlUtils.java | 2 +-
.../service/server/HS2ActivePassiveHARegistry.java | 2 +-
.../apache/hive/service/server/HiveServer2.java | 2 +-
.../cli/TestCLIServiceConnectionLimits.java | 2 +-
.../cli/TestRetryingThriftCLIServiceClient.java | 2 +-
.../cli/operation/TestOperationLogManager.java | 2 +-
.../cli/session/TestSessionManagerMetrics.java | 2 +-
.../hive/service/server/TestHS2HttpServer.java | 2 +-
.../apache/hadoop}/hive/common/IPStackUtils.java | 18 +++-
.../hadoop}/hive/common/IPStackUtilsTest.java | 109 ++++++++++++++-------
.../hadoop/hive/metastore/HiveMetaStore.java | 3 +-
.../hadoop/hive/metastore/MetaStoreTestUtils.java | 3 +-
.../hive/metastore/TestHiveMetaStoreTimeout.java | 2 +-
40 files changed, 147 insertions(+), 99 deletions(-)
diff --git
a/beeline/src/java/org/apache/hive/beeline/hs2connection/HiveSiteHS2ConnectionFileParser.java
b/beeline/src/java/org/apache/hive/beeline/hs2connection/HiveSiteHS2ConnectionFileParser.java
index ec596779533..4832589082e 100644
---
a/beeline/src/java/org/apache/hive/beeline/hs2connection/HiveSiteHS2ConnectionFileParser.java
+++
b/beeline/src/java/org/apache/hive/beeline/hs2connection/HiveSiteHS2ConnectionFileParser.java
@@ -26,6 +26,7 @@
import org.apache.hadoop.hive.common.ServerUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -119,8 +120,7 @@ private void addHosts(Properties props) throws
BeelineHS2ConnectionFileParseExce
}
}
- private void addZKServiceDiscoveryHosts(Properties props)
- throws BeelineHS2ConnectionFileParseException {
+ private void addZKServiceDiscoveryHosts(Properties props) {
props.setProperty("serviceDiscoveryMode", "zooKeeper");
props.setProperty("zooKeeperNamespace",
HiveConf.getVar(conf, ConfVars.HIVE_SERVER2_ZOOKEEPER_NAMESPACE));
@@ -141,7 +141,9 @@ private void addDefaultHS2Hosts(Properties props) throws
BeelineHS2ConnectionFil
}
int portNum = getPortNum(
"http".equalsIgnoreCase(HiveConf.getVar(conf,
ConfVars.HIVE_SERVER2_TRANSPORT_MODE)));
- props.setProperty("hosts", serverIPAddress.getHostName() + ":" + portNum);
+ // The hosts property is used in constructing the connection URL;
serverIPAddress.getHostName() might return an
+ // IP address depending on the configuration, hence we need to properly
escape a possible IPv6 literal
+ props.setProperty("hosts",
IPStackUtils.concatHostPort(serverIPAddress.getHostName(), portNum));
}
private int getPortNum(boolean isHttp) {
diff --git a/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
b/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
index 52f3ebfe327..9a93556e5b4 100644
--- a/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
+++ b/beeline/src/test/org/apache/hive/beeline/ProxyAuthTest.java
@@ -26,10 +26,10 @@
import java.sql.Statement;
import org.apache.commons.io.FileUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.jdbc.HiveConnection;
import org.apache.hive.service.auth.HiveAuthConstants;
import org.apache.hive.service.cli.session.SessionUtils;
-import org.apache.hive.beeline.BeeLine;
import org.apache.hadoop.hive.shims.Utils;
/**
@@ -84,7 +84,7 @@ public static void main(String[] args) throws Exception {
/*
* Connect via kerberos and get delegation token
*/
- url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" +
serverPrincipal;
+ url = String.format("jdbc:hive2://%s/default;principal=%s",
IPStackUtils.concatHostPort(host, port), serverPrincipal);
con = DriverManager.getConnection(url);
System.out.println("Connected successfully to " + url);
// get delegation token for the given proxy user
@@ -98,7 +98,7 @@ public static void main(String[] args) throws Exception {
System.setProperty(BEELINE_EXIT, "true");
// connect using principal via Beeline with inputStream
- url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" +
serverPrincipal;
+ url = String.format("jdbc:hive2://%s/default;principal=%s",
IPStackUtils.concatHostPort(host, port), serverPrincipal);
currentResultFile = generateSQL(null);
beeLineArgs = new String[] { "-u", url, "-n", "foo", "-p", "bar"};
System.out.println("Connection with kerberos, user/password via args,
using input rediction");
@@ -106,7 +106,7 @@ public static void main(String[] args) throws Exception {
compareResults( currentResultFile);
// connect using principal via Beeline with inputStream
- url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" +
serverPrincipal;
+ url = String.format("jdbc:hive2://%s/default;principal=%s",
IPStackUtils.concatHostPort(host, port), serverPrincipal);
currentResultFile = generateSQL(null);
beeLineArgs = new String[] { "-u", url, "-n", "foo", "-p", "bar", "-f" ,
scriptFileName};
System.out.println("Connection with kerberos, user/password via args,
using input script");
@@ -114,7 +114,7 @@ public static void main(String[] args) throws Exception {
compareResults( currentResultFile);
// connect using principal via Beeline with inputStream
- url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" +
serverPrincipal;
+ url = String.format("jdbc:hive2://%s/default;principal=%s",
IPStackUtils.concatHostPort(host, port), serverPrincipal);
currentResultFile = generateSQL(url+ " foo bar ");
beeLineArgs = new String[] { "-u", url, "-f" , scriptFileName};
System.out.println("Connection with kerberos, user/password via connect,
using input script");
@@ -122,7 +122,7 @@ public static void main(String[] args) throws Exception {
compareResults( currentResultFile);
// connect using principal via Beeline with inputStream
- url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" +
serverPrincipal;
+ url = String.format("jdbc:hive2://%s/default;principal=%s",
IPStackUtils.concatHostPort(host, port), serverPrincipal);
currentResultFile = generateSQL(url+ " foo bar ");
beeLineArgs = new String[] { "-u", url, "-f" , scriptFileName};
System.out.println("Connection with kerberos, user/password via connect,
using input redirect");
@@ -134,14 +134,14 @@ public static void main(String[] args) throws Exception {
*/
System.out.println("Store token into ugi and try");
storeTokenInJobConf(token);
- url = "jdbc:hive2://" + host + ":" + port +
"/default;auth=delegationToken";
+ url = String.format("jdbc:hive2://%s/default;auth=delegationToken",
IPStackUtils.concatHostPort(host, port));
con = DriverManager.getConnection(url);
System.out.println("Connecting to " + url);
runTest();
con.close();
// connect using token via Beeline with inputStream
- url = "jdbc:hive2://" + host + ":" + port + "/default";
+ url = String.format("jdbc:hive2://%s/default",
IPStackUtils.concatHostPort(host, port));
currentResultFile = generateSQL(null);
beeLineArgs = new String[] { "-u", url, "-n", "foo", "-p", "bar", "-a",
"delegationToken" };
System.out.println("Connection with token, user/password via args, using
input redirection");
@@ -149,7 +149,7 @@ public static void main(String[] args) throws Exception {
compareResults( currentResultFile);
// connect using token via Beeline using script
- url = "jdbc:hive2://" + host + ":" + port + "/default";
+ url = String.format("jdbc:hive2://%s/default",
IPStackUtils.concatHostPort(host, port));
currentResultFile = generateSQL(null);
beeLineArgs = new String[] { "-u", url, "-n", "foo", "-p", "bar", "-a",
"delegationToken",
"-f", scriptFileName};
@@ -158,7 +158,7 @@ public static void main(String[] args) throws Exception {
compareResults( currentResultFile);
// connect using token via Beeline using script
- url = "jdbc:hive2://" + host + ":" + port + "/default";
+ url = String.format("jdbc:hive2://%s/default",
IPStackUtils.concatHostPort(host, port));
currentResultFile = generateSQL(url + " foo bar ");
beeLineArgs = new String [] {"-a", "delegationToken", "-f",
scriptFileName};
System.out.println("Connection with token, user/password via connect,
using input script");
@@ -166,7 +166,7 @@ public static void main(String[] args) throws Exception {
compareResults( currentResultFile);
// connect using token via Beeline using script
- url = "jdbc:hive2://" + host + ":" + port + "/default";
+ url = String.format("jdbc:hive2://%s/default",
IPStackUtils.concatHostPort(host, port));
currentResultFile = generateSQL(url + " foo bar ");
System.out.println("Connection with token, user/password via connect,
using input script");
beeLineArgs = new String [] {"-f", scriptFileName, "-a",
"delegationToken"};
@@ -176,8 +176,8 @@ public static void main(String[] args) throws Exception {
/*
* Connect via kerberos with trusted proxy user
*/
- url = "jdbc:hive2://" + host + ":" + port + "/default;principal=" +
serverPrincipal
- + ";hive.server2.proxy.user=" + proxyUser;
+ url =
String.format("jdbc:hive2://%s/default;principal=%s;hive.server2.proxy.user=%s",
+ IPStackUtils.concatHostPort(host, port), serverPrincipal, proxyUser);
con = DriverManager.getConnection(url);
System.out.println("Connected successfully to " + url);
runTest();
@@ -191,7 +191,7 @@ public static void main(String[] args) throws Exception {
/* verify the connection fails after canceling the token */
try {
- url = "jdbc:hive2://" + host + ":" + port +
"/default;auth=delegationToken";
+ url = String.format("jdbc:hive2://%s/default;auth=delegationToken",
IPStackUtils.concatHostPort(host, port));
con = DriverManager.getConnection(url);
throw new Exception ("connection should have failed after token
cancellation");
} catch (SQLException e) {
diff --git
a/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java
b/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java
index 3e17cdd10fc..d3db5ca624c 100644
--- a/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/auth/HiveAuthUtils.java
@@ -33,7 +33,7 @@
import com.google.common.base.Splitter;
import com.google.common.collect.Sets;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.thrift.transport.TSSLTransportFactory;
import
org.apache.thrift.transport.TSSLTransportFactory.TSSLTransportParameters;
import org.apache.thrift.transport.TServerSocket;
diff --git
a/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
b/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
index 67da566b51b..4ede0490017 100644
--- a/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
+++ b/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
@@ -28,7 +28,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import
org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.io.Text;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import static org.junit.Assert.assertEquals;
import org.junit.Test;
diff --git
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
index dad8efa5705..8a7f966e89a 100644
---
a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
+++
b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
@@ -75,7 +75,7 @@
import org.apache.hadoop.mapred.OutputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.StringUtils;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git
a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseStorageHandler.java
b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseStorageHandler.java
index 422c2c302d5..9e944a28499 100644
---
a/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseStorageHandler.java
+++
b/hbase-handler/src/test/org/apache/hadoop/hive/hbase/TestHBaseStorageHandler.java
@@ -29,7 +29,7 @@
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import org.apache.hadoop.mapred.JobConf;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
diff --git
a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java
b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java
index e23876062fa..e176c9a593d 100644
--- a/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java
+++ b/hcatalog/core/src/test/java/org/apache/hive/hcatalog/MiniCluster.java
@@ -34,7 +34,7 @@
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
/**
* This class builds a single instance of itself with the Singleton
diff --git
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
index 86dc20c9e70..751120d8dc8 100644
---
a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
+++
b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/Main.java
@@ -34,7 +34,7 @@
import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
import
org.apache.hadoop.security.authentication.server.KerberosAuthenticationHandler;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.commons.lang3.StringUtils;
@@ -72,7 +72,6 @@
import javax.servlet.ServletException;
import javax.servlet.ServletRequest;
import javax.servlet.ServletResponse;
-import javax.servlet.annotation.WebFilter;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
diff --git
a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
index a79c5af9cad..95324268222 100644
---
a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
+++
b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
@@ -26,7 +26,7 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hive.shims.HadoopShimsSecure;
import org.apache.hadoop.util.StringUtils;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
diff --git
a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveShell.java
b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveShell.java
index 6e5c4722d75..b9a1edeec88 100644
---
a/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveShell.java
+++
b/iceberg/iceberg-handler/src/test/java/org/apache/iceberg/mr/hive/TestHiveShell.java
@@ -23,12 +23,12 @@
import java.util.List;
import java.util.stream.Collectors;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.ql.lockmgr.DbTxnManager;
import
org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
-import org.apache.hive.common.IPStackUtils;
import org.apache.hive.service.cli.CLIService;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationHandle;
diff --git
a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
index 82ae630f56a..fc48058313c 100644
---
a/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
+++
b/itests/hcatalog-unit/src/test/java/org/apache/hive/hcatalog/hbase/ManyMiniCluster.java
@@ -34,7 +34,7 @@
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.testutils.MiniZooKeeperCluster;
import java.io.File;
diff --git
a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
index 6ff547217ec..9adc38fdf56 100644
---
a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
+++
b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestHs2HooksWithMiniKdc.java
@@ -31,7 +31,7 @@
import org.apache.hadoop.hive.hooks.TestHs2Hooks.PostExecHook;
import org.apache.hadoop.hive.hooks.TestHs2Hooks.PreExecHook;
import org.apache.hadoop.hive.hooks.TestHs2Hooks.SemanticAnalysisHook;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.jdbc.miniHS2.MiniHS2;
import org.junit.After;
import org.junit.AfterClass;
diff --git
a/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/metastore/security/TestHadoopAuthBridge23.java
b/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/metastore/security/TestHadoopAuthBridge23.java
index b2550005633..11b66f1386c 100644
---
a/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/metastore/security/TestHadoopAuthBridge23.java
+++
b/itests/hive-unit-hadoop2/src/test/java/org/apache/hadoop/hive/metastore/security/TestHadoopAuthBridge23.java
@@ -39,7 +39,7 @@
import
org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
import org.apache.hadoop.security.token.delegation.DelegationKey;
import org.apache.hadoop.util.StringUtils;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.thrift.transport.TSaslServerTransport;
import org.apache.thrift.transport.TTransportException;
import org.junit.Assert;
diff --git
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
index 1c51823b7ce..3be347de24d 100644
---
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
+++
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/hooks/TestHs2Hooks.java
@@ -38,7 +38,7 @@
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.plan.HiveOperation;
import org.apache.hadoop.hive.ql.exec.Task;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.jdbc.HiveConnection;
import org.apache.hive.service.server.HiveServer2;
import org.junit.AfterClass;
diff --git
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java
index a3f5e9ab040..7cdc1bbfa9b 100644
---
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java
+++
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/metastore/TestReplChangeManager.java
@@ -53,7 +53,7 @@
import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.apache.hadoop.util.StringUtils;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
diff --git
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestWarehouseDnsPath.java
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestWarehouseDnsPath.java
index 4c7b7b8969a..e906921b17f 100644
---
a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestWarehouseDnsPath.java
+++
b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestWarehouseDnsPath.java
@@ -24,7 +24,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.metastore.Warehouse;
import org.apache.hadoop.hive.metastore.api.MetaException;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.junit.Test;
public class TestWarehouseDnsPath {
diff --git
a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
index c146d058c68..76767219fab 100644
---
a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
+++
b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/thrift/TestThriftHttpCLIServiceFeatures.java
@@ -44,7 +44,7 @@
import
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveOperationType;
import
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
import
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzContext;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.jdbc.HttpBasicAuthInterceptor;
import org.apache.hive.service.auth.HiveAuthConstants;
import org.apache.hive.service.rpc.thrift.TCLIService;
@@ -231,8 +231,7 @@ private static TTransport getHttpTransport() throws
Exception {
}
private static String getHttpUrl() {
- return transportMode + "://" + host + ":"
- + port +
+ return transportMode + "://" + IPStackUtils.concatHostPort(host, port) +
"/" + thriftHttpPath + "/";
}
diff --git a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
index b90a9c987f0..8f7c3ea8acd 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/HiveConnection.java
@@ -91,6 +91,7 @@
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.jdbc.jwt.HttpJwtAuthRequestInterceptor;
import org.apache.hive.jdbc.saml.HiveJdbcBrowserClientFactory;
import org.apache.hive.jdbc.saml.HiveJdbcSamlRedirectStrategy;
@@ -564,7 +565,7 @@ private String getServerHttpUrl(boolean useSsl) {
} else if (!httpPath.startsWith("/")) {
httpPath = "/" + httpPath;
}
- return schemeName + "://" + host + ":" + port + httpPath;
+ return schemeName + "://" + IPStackUtils.concatHostPort(host, port) +
httpPath;
}
private TTransport createHttpTransport() throws SQLException,
TTransportException {
diff --git a/jdbc/src/java/org/apache/hive/jdbc/Utils.java
b/jdbc/src/java/org/apache/hive/jdbc/Utils.java
index 46a2c582ba3..023c23f7ca0 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/Utils.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/Utils.java
@@ -34,6 +34,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.rpc.thrift.TStatus;
import org.apache.hive.service.rpc.thrift.TStatusCode;
@@ -646,7 +647,7 @@ public static JdbcConnectionParams
extractURLComponents(String uri, Properties i
connParams.setPort(port);
}
// We check for invalid host, port while configuring connParams with
configureConnParams()
- authorityStr = connParams.getHost() + ":" + connParams.getPort();
+ authorityStr = IPStackUtils.concatHostPort(connParams.getHost(),
connParams.getPort());
LOG.debug("Resolved authority: " + authorityStr);
uri = uri.replace(dummyAuthorityString, authorityStr);
}
@@ -659,7 +660,7 @@ public static JdbcConnectionParams
extractURLComponents(String uri, Properties i
static void configureConnParamsFromZooKeeper(JdbcConnectionParams connParams)
throws ZooKeeperHiveClientException, JdbcUriParseException {
ZooKeeperHiveClientHelper.configureConnParams(connParams);
- String authorityStr = connParams.getHost() + ":" + connParams.getPort();
+ String authorityStr = IPStackUtils.concatHostPort(connParams.getHost(),
connParams.getPort());
LOG.debug("Resolved authority: " + authorityStr);
String jdbcUriString = connParams.getJdbcUriString();
// Replace ZooKeeper ensemble from the authority component of the JDBC Uri
provided by the
diff --git a/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java
b/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java
index 513113bb077..bc06ae39cca 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java
@@ -34,7 +34,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.SSLZookeeperFactory;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
import org.apache.hive.service.server.HS2ActivePassiveHARegistry;
import org.apache.hive.service.server.HS2ActivePassiveHARegistryClient;
diff --git
a/kafka-handler/src/test/org/apache/hadoop/hive/kafka/KafkaBrokerResource.java
b/kafka-handler/src/test/org/apache/hadoop/hive/kafka/KafkaBrokerResource.java
index 642358bf337..e2f8bbafe01 100644
---
a/kafka-handler/src/test/org/apache/hadoop/hive/kafka/KafkaBrokerResource.java
+++
b/kafka-handler/src/test/org/apache/hadoop/hive/kafka/KafkaBrokerResource.java
@@ -25,7 +25,7 @@
import kafka.zk.AdminZkClient;
import kafka.zk.EmbeddedZookeeper;
import org.apache.commons.io.FileUtils;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.kafka.common.network.Mode;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.test.TestSslUtils;
diff --git
a/llap-client/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/LlapTaskUmbilicalServer.java
b/llap-client/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/LlapTaskUmbilicalServer.java
index afd5ab920c1..39d21a97c7f 100644
---
a/llap-client/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/LlapTaskUmbilicalServer.java
+++
b/llap-client/src/java/org/apache/hadoop/hive/llap/tezplugins/helpers/LlapTaskUmbilicalServer.java
@@ -35,10 +35,9 @@
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.security.authorize.Service;
import org.apache.hadoop.security.token.Token;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.tez.common.security.JobTokenIdentifier;
import org.apache.tez.common.security.JobTokenSecretManager;
-import org.apache.tez.runtime.api.impl.TezEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git
a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java
b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java
index fc8ea063678..eb4fe2fee59 100644
---
a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java
+++
b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/services/impl/LlapWebServices.java
@@ -41,7 +41,7 @@
import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.service.CompositeService;
import org.apache.hadoop.yarn.api.records.Resource;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.common.util.HiveVersionInfo;
import org.apache.hive.http.HttpServer;
import org.slf4j.Logger;
diff --git
a/llap-server/src/test/org/apache/hadoop/hive/llap/shufflehandler/TestShuffleHandler.java
b/llap-server/src/test/org/apache/hadoop/hive/llap/shufflehandler/TestShuffleHandler.java
index f53891d4f5a..57f3a89c9f1 100644
---
a/llap-server/src/test/org/apache/hadoop/hive/llap/shufflehandler/TestShuffleHandler.java
+++
b/llap-server/src/test/org/apache/hadoop/hive/llap/shufflehandler/TestShuffleHandler.java
@@ -32,7 +32,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataOutputBuffer;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.common.util.Retry;
import
org.apache.tez.runtime.library.common.shuffle.orderedgrouped.ShuffleHeader;
import org.junit.Assert;
diff --git
a/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskCommunicator.java
b/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskCommunicator.java
index a90de995a83..227f7b3e642 100644
---
a/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskCommunicator.java
+++
b/llap-tez/src/java/org/apache/hadoop/hive/llap/tezplugins/LlapTaskCommunicator.java
@@ -83,7 +83,7 @@
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.webapp.util.WebAppUtils;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.tez.common.TezTaskUmbilicalProtocol;
import org.apache.tez.common.TezUtils;
import org.apache.tez.common.security.JobTokenSecretManager;
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
index 2eaed2f1107..464c4fcb65f 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommands.java
@@ -78,7 +78,7 @@
import org.apache.hadoop.hive.ql.processors.CommandProcessorException;
import org.apache.hadoop.hive.ql.session.SessionState;
import org.apache.hadoop.hive.ql.txn.compactor.CompactorTestUtilities;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.thrift.TException;
import org.junit.Assert;
import org.junit.Ignore;
diff --git
a/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java
b/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java
index 117590456dd..5b5eaf19cb4 100644
---
a/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java
+++
b/serde/src/test/org/apache/hadoop/hive/serde2/TestTCTLSeparatedProtocol.java
@@ -25,7 +25,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.thrift.TCTLSeparatedProtocol;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.thrift.TConfiguration;
import org.apache.thrift.protocol.TField;
import org.apache.thrift.protocol.TList;
diff --git
a/service/src/java/org/apache/hive/service/auth/saml/HiveSamlUtils.java
b/service/src/java/org/apache/hive/service/auth/saml/HiveSamlUtils.java
index 68f2fe2eedb..8a35bce2d27 100644
--- a/service/src/java/org/apache/hive/service/auth/saml/HiveSamlUtils.java
+++ b/service/src/java/org/apache/hive/service/auth/saml/HiveSamlUtils.java
@@ -24,7 +24,7 @@
import javax.servlet.http.HttpServletRequest;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
public class HiveSamlUtils {
diff --git
a/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java
b/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java
index bf0f53636ec..d355724401c 100644
---
a/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java
+++
b/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java
@@ -43,7 +43,7 @@
import org.apache.hadoop.registry.client.types.Endpoint;
import org.apache.hadoop.registry.client.types.ServiceRecord;
import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.service.ServiceException;
import org.apache.hive.service.auth.AuthType;
import org.slf4j.Logger;
diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java
b/service/src/java/org/apache/hive/service/server/HiveServer2.java
index 8477e324b1e..a801996bfca 100644
--- a/service/src/java/org/apache/hive/service/server/HiveServer2.java
+++ b/service/src/java/org/apache/hive/service/server/HiveServer2.java
@@ -99,7 +99,7 @@
import org.apache.hadoop.hive.registry.impl.ZookeeperUtils;
import org.apache.hadoop.hive.shims.ShimLoader;
import org.apache.hadoop.hive.shims.Utils;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.common.util.HiveStringUtils;
import org.apache.hive.common.util.HiveVersionInfo;
import org.apache.hive.common.util.Ref;
diff --git
a/service/src/test/org/apache/hive/service/cli/TestCLIServiceConnectionLimits.java
b/service/src/test/org/apache/hive/service/cli/TestCLIServiceConnectionLimits.java
index b41cb503db6..dff6b6e8fe1 100644
---
a/service/src/test/org/apache/hive/service/cli/TestCLIServiceConnectionLimits.java
+++
b/service/src/test/org/apache/hive/service/cli/TestCLIServiceConnectionLimits.java
@@ -20,7 +20,7 @@
import java.util.List;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.service.cli.session.SessionManager;
import org.junit.Test;
import org.junit.rules.ExpectedException;
diff --git
a/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java
b/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java
index b0092ef4aa1..ca7edab93a7 100644
---
a/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java
+++
b/service/src/test/org/apache/hive/service/cli/TestRetryingThriftCLIServiceClient.java
@@ -20,7 +20,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveServer2TransportMode;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.service.Service;
import org.apache.hive.service.auth.HiveAuthConstants;
import org.apache.hive.service.cli.session.HiveSession;
diff --git
a/service/src/test/org/apache/hive/service/cli/operation/TestOperationLogManager.java
b/service/src/test/org/apache/hive/service/cli/operation/TestOperationLogManager.java
index ff29233287f..4f897f5bb4a 100644
---
a/service/src/test/org/apache/hive/service/cli/operation/TestOperationLogManager.java
+++
b/service/src/test/org/apache/hive/service/cli/operation/TestOperationLogManager.java
@@ -26,7 +26,7 @@
import java.util.Random;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.junit.Before;
import org.junit.Test;
diff --git
a/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
b/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
index 3b8005f076c..9ed2a15e9a2 100644
---
a/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
+++
b/service/src/test/org/apache/hive/service/cli/session/TestSessionManagerMetrics.java
@@ -33,7 +33,7 @@
import org.apache.hadoop.hive.common.metrics.metrics2.MetricsReporting;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.service.cli.FetchOrientation;
import org.apache.hive.service.cli.HiveSQLException;
import org.apache.hive.service.cli.OperationHandle;
diff --git
a/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java
b/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java
index c8d5bef0d7d..c80e4250e8e 100644
--- a/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java
+++ b/service/src/test/org/apache/hive/service/server/TestHS2HttpServer.java
@@ -24,7 +24,7 @@
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hive.service.cli.CLIService;
import org.apache.hive.service.cli.OperationHandle;
import org.apache.hive.service.cli.SessionHandle;
diff --git a/common/src/java/org/apache/hive/common/IPStackUtils.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/common/IPStackUtils.java
similarity index 97%
rename from common/src/java/org/apache/hive/common/IPStackUtils.java
rename to standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/common/IPStackUtils.java
index 949b6f14be1..56b60e2c5a6 100644
--- a/common/src/java/org/apache/hive/common/IPStackUtils.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/common/IPStackUtils.java
@@ -16,7 +16,7 @@
* limitations under the License.
*/
-package org.apache.hive.common;
+package org.apache.hadoop.hive.common;
import com.google.common.annotations.VisibleForTesting;
import io.netty.util.NetUtil;
@@ -162,9 +162,15 @@ public static String concatLoopbackAddressPort(int port) {
* @return the concatenated host and port
*/
public static String concatHostPort(String host, int port) {
+ validateHostNotEmpty(host);
+ validatePort(port);
return formatIPAddressForURL(host) + ":" + port;
}
+ public static String concatHostPort(String host, String port) {
+ return concatHostPort(host, Integer.parseInt(port));
+ }
+
/**
* Prepares an IP address for use in a URL.
* <p>
@@ -251,9 +257,7 @@ public static HostPort getHostAndPort(String input) {
port = getPort(input.substring(colonIndex + 1));
// Check if the host is not null or empty
- if (StringUtils.isEmpty(host) || host.equals("[]")) {
- throw new IllegalArgumentException("Host address is null or empty.");
- }
+ validateHostNotEmpty(host);
// Handle IPv6 addresses enclosed in square brackets (e.g., [IPv6]:port)
if (host.startsWith("[") && host.endsWith("]")) {
@@ -280,6 +284,12 @@ public static int getPort(String portString) {
validatePort(port);
return port;
}
+
+ private static void validateHostNotEmpty(String host) {
+ if (StringUtils.isEmpty(host) || host.equals("[]")) {
+ throw new IllegalArgumentException("Host address is null or empty.");
+ }
+ }
private static void validatePort(int port) {
if (port < 0 || port > 65535) {
diff --git a/common/src/test/org/apache/hive/common/IPStackUtilsTest.java b/standalone-metastore/metastore-common/src/test/java/org/apache/hadoop/hive/common/IPStackUtilsTest.java
similarity index 64%
rename from common/src/test/org/apache/hive/common/IPStackUtilsTest.java
rename to standalone-metastore/metastore-common/src/test/java/org/apache/hadoop/hive/common/IPStackUtilsTest.java
index 7b289ee275f..7e4561321d0 100644
--- a/common/src/test/org/apache/hive/common/IPStackUtilsTest.java
+++ b/standalone-metastore/metastore-common/src/test/java/org/apache/hadoop/hive/common/IPStackUtilsTest.java
@@ -16,16 +16,17 @@
* limitations under the License.
*/
-package org.apache.hive.common;
+package org.apache.hadoop.hive.common;
-import org.junit.jupiter.api.Test;
+import org.junit.Test;
-import static org.junit.jupiter.api.Assertions.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
-class IPStackUtilsTest {
+public class IPStackUtilsTest {
@Test
- void testIPv4LoopbackWhenIPv4StackIsForced() {
+ public void testIPv4LoopbackWhenIPv4StackIsForced() {
IPStackUtils.setPreferIPv4Stack(true);
IPStackUtils.setPreferIPv6Addresses(false);
@@ -34,7 +35,7 @@ void testIPv4LoopbackWhenIPv4StackIsForced() {
}
@Test
- void testIPv6LoopbackWhenIPv6IsPreferred() {
+ public void testIPv6LoopbackWhenIPv6IsPreferred() {
IPStackUtils.setPreferIPv4Stack(false);
IPStackUtils.setPreferIPv6Addresses(true);
@@ -43,7 +44,7 @@ void testIPv6LoopbackWhenIPv6IsPreferred() {
}
@Test
- void testIPv4LoopbackWhenIPv6IsNotPreferred() {
+ public void testIPv4LoopbackWhenIPv6IsNotPreferred() {
IPStackUtils.setPreferIPv4Stack(false);
IPStackUtils.setPreferIPv6Addresses(false);
@@ -52,7 +53,7 @@ void testIPv4LoopbackWhenIPv6IsNotPreferred() {
}
@Test
- void testIPv4WildcardWhenIPv4StackIsForced() {
+ public void testIPv4WildcardWhenIPv4StackIsForced() {
IPStackUtils.setPreferIPv4Stack(true);
IPStackUtils.setPreferIPv6Addresses(false);
@@ -61,7 +62,7 @@ void testIPv4WildcardWhenIPv4StackIsForced() {
}
@Test
- void testIPv6WildcardWhenIPv6IsPreferred() {
+ public void testIPv6WildcardWhenIPv6IsPreferred() {
IPStackUtils.setPreferIPv4Stack(false);
IPStackUtils.setPreferIPv6Addresses(true);
@@ -70,7 +71,7 @@ void testIPv6WildcardWhenIPv6IsPreferred() {
}
@Test
- void testIPv4WildcardWhenIPv6IsNotPreferred() {
+ public void testIPv4WildcardWhenIPv6IsNotPreferred() {
IPStackUtils.setPreferIPv4Stack(false);
IPStackUtils.setPreferIPv6Addresses(false);
@@ -79,7 +80,7 @@ void testIPv4WildcardWhenIPv6IsNotPreferred() {
}
@Test
- void testConcatHostPort() {
+ public void testConcatHostPort() {
assertEquals("192.168.1.1:8080", IPStackUtils.concatHostPort("192.168.1.1", 8080));
assertEquals("[2001:db8::1]:8080", IPStackUtils.concatHostPort("2001:db8::1", 8080));
assertEquals("[::1]:9090", IPStackUtils.concatHostPort("::1", 9090));
@@ -87,7 +88,7 @@ void testConcatHostPort() {
}
@Test
- void testWildcardWhenIPv4StackIsForcedAndIPv4WildcardProvided() {
+ public void testWildcardWhenIPv4StackIsForcedAndIPv4WildcardProvided() {
IPStackUtils.setPreferIPv4Stack(true);
IPStackUtils.setPreferIPv6Addresses(false);
@@ -96,7 +96,7 @@ void testWildcardWhenIPv4StackIsForcedAndIPv4WildcardProvided() {
}
@Test
- void testWildcardWhenIPv4StackIsForcedAndIPv6WildcardProvided() {
+ public void testWildcardWhenIPv4StackIsForcedAndIPv6WildcardProvided() {
IPStackUtils.setPreferIPv4Stack(true);
IPStackUtils.setPreferIPv6Addresses(false);
@@ -106,7 +107,7 @@ void testWildcardWhenIPv4StackIsForcedAndIPv6WildcardProvided() {
@Test
- void testWildcardWhenIPv6IsPreferredAndIPv6WildcardProvided() {
+ public void testWildcardWhenIPv6IsPreferredAndIPv6WildcardProvided() {
IPStackUtils.setPreferIPv4Stack(false);
IPStackUtils.setPreferIPv6Addresses(true);
@@ -115,7 +116,7 @@ void testWildcardWhenIPv6IsPreferredAndIPv6WildcardProvided() {
}
@Test
- void testWildcardWhenIPv6IsPreferredAndIPv4WildcardProvided() {
+ public void testWildcardWhenIPv6IsPreferredAndIPv4WildcardProvided() {
IPStackUtils.setPreferIPv4Stack(false);
IPStackUtils.setPreferIPv6Addresses(true);
@@ -124,7 +125,7 @@ void testWildcardWhenIPv6IsPreferredAndIPv4WildcardProvided() {
}
@Test
- void testWildcardWhenIPv6IsNotPreferredAndIPv4WildcardProvided() {
+ public void testWildcardWhenIPv6IsNotPreferredAndIPv4WildcardProvided() {
IPStackUtils.setPreferIPv4Stack(false);
IPStackUtils.setPreferIPv6Addresses(false);
@@ -133,7 +134,7 @@ void testWildcardWhenIPv6IsNotPreferredAndIPv4WildcardProvided() {
}
@Test
- void testWildcardWhenIPv6IsNotPreferredAndIPv6WildcardProvided() {
+ public void testWildcardWhenIPv6IsNotPreferredAndIPv6WildcardProvided() {
IPStackUtils.setPreferIPv4Stack(false);
IPStackUtils.setPreferIPv6Addresses(false);
@@ -142,7 +143,7 @@ void testWildcardWhenIPv6IsNotPreferredAndIPv6WildcardProvided() {
}
@Test
- void testAdaptWildcardAddress() {
+ public void testAdaptWildcardAddress() {
assertEquals("192.168.1.1", IPStackUtils.adaptWildcardAddress("192.168.1.1"));
assertEquals("2001:db8::1", IPStackUtils.adaptWildcardAddress("2001:db8::1"));
assertEquals("example.com", IPStackUtils.adaptWildcardAddress("example.com"));
@@ -151,59 +152,93 @@ void testAdaptWildcardAddress() {
// Test cases for getHostAndPort method
@Test
- void testGetHostAndPortWithIPv4() {
+ public void testGetHostAndPortWithIPv4() {
IPStackUtils.HostPort result = IPStackUtils.getHostAndPort("192.168.1.1:8080");
assertEquals("192.168.1.1", result.getHostname());
assertEquals(8080, result.getPort());
}
@Test
- void testGetHostAndPortWithValidIPv6WithSquaredBrackets() {
+ public void testGetHostAndPortWithValidIPv6WithSquaredBrackets() {
IPStackUtils.HostPort result = IPStackUtils.getHostAndPort("[2001:0db8::1]:8080");
assertEquals("2001:0db8::1", result.getHostname());
assertEquals(8080, result.getPort());
}
@Test
- void testGetHostAndPortWithValidIPv6WithoutSquaredBrackets() {
+ public void testGetHostAndPortWithValidIPv6WithoutSquaredBrackets() {
IPStackUtils.HostPort result = IPStackUtils.getHostAndPort("2001:0db8::1:8080");
assertEquals("2001:0db8::1", result.getHostname());
assertEquals(8080, result.getPort());
}
@Test
- void testGetHostAndPortWithHostname() {
+ public void testGetHostAndPortWithHostname() {
IPStackUtils.HostPort result = IPStackUtils.getHostAndPort("example.com:80");
assertEquals("example.com", result.getHostname());
assertEquals(80, result.getPort());
}
@Test
- void testGetHostPortWithInvalidAndPort() {
- assertThrows(IllegalArgumentException.class, () -> IPStackUtils.getHostAndPort("192.168.1.1:70000"),
- "Port number out of range (0-65535).");
- assertThrows(IllegalArgumentException.class, () -> IPStackUtils.getHostAndPort("192.168.1.1"),
- "Input does not contain a port.");
- assertThrows(IllegalArgumentException.class, () -> IPStackUtils.getHostAndPort(":8080"),
- "Host address is null or empty.");
+ public void testGetHostPortWithInvalidAndPort() {
+ // Test case: port number out of range
+ try {
+ IPStackUtils.getHostAndPort("192.168.1.1:70000");
+ fail("Expected IllegalArgumentException to be thrown");
+ } catch (IllegalArgumentException e) {
+ assertEquals("Port number out of range (0-65535).", e.getMessage());
+ }
+
+ // Test case: input missing port
+ try {
+ IPStackUtils.getHostAndPort("192.168.1.1");
+ fail("Expected IllegalArgumentException to be thrown");
+ } catch (IllegalArgumentException e) {
+ assertEquals("Input does not contain a port.", e.getMessage());
+ }
+
+ // Test case: missing host
+ try {
+ IPStackUtils.getHostAndPort(":8080");
+ fail("Expected IllegalArgumentException to be thrown");
+ } catch (IllegalArgumentException e) {
+ assertEquals("Host address is null or empty.", e.getMessage());
+ }
}
// Test cases for getPort method
@Test
- void testGetPort() {
+ public void testGetPort() {
assertEquals(8080, IPStackUtils.getPort("8080"));
assertEquals(65535, IPStackUtils.getPort("65535"));
assertEquals(0, IPStackUtils.getPort("0"));
}
@Test
- void testGetPortWithInvalidPort() {
- assertThrows(IllegalArgumentException.class, () -> IPStackUtils.getPort("70000"),
- "Port number out of range (0-65535).");
- assertThrows(IllegalArgumentException.class, () -> IPStackUtils.getPort("-1"),
- "Port number out of range (0-65535).");
- assertThrows(IllegalArgumentException.class, () -> IPStackUtils.getPort("abc"),
- "For input string: \"abc\"");
+ public void testGetPortWithInvalidPort() {
+ // Test case: port number too high
+ try {
+ IPStackUtils.getPort("70000");
+ fail("Expected IllegalArgumentException to be thrown");
+ } catch (IllegalArgumentException e) {
+ assertEquals("Port number out of range (0-65535).", e.getMessage());
+ }
+
+ // Test case: negative port number
+ try {
+ IPStackUtils.getPort("-1");
+ fail("Expected IllegalArgumentException to be thrown");
+ } catch (IllegalArgumentException e) {
+ assertEquals("Port number out of range (0-65535).", e.getMessage());
+ }
+
+ // Test case: non-numeric port
+ try {
+ IPStackUtils.getPort("abc");
+ fail("Expected IllegalArgumentException to be thrown");
+ } catch (IllegalArgumentException e) {
+ assertEquals("For input string: \"abc\"", e.getMessage());
+ }
}
}
\ No newline at end of file
diff --git
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 9cbbe7e43fe..c30e000cba5 100644
---
a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++
b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -46,6 +46,7 @@
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ShutdownHookManager;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.core.LoggerContext;
import org.apache.thrift.TProcessor;
@@ -771,7 +772,7 @@ static ServletServerBuilder.Descriptor createIcebergServlet(Configuration config
* @throws Exception
*/
private static String getServerInstanceURI(int port) throws Exception {
- return getServerHostName() + ":" + port;
+ return IPStackUtils.concatHostPort(getServerHostName(), port);
}
static String getServerHostName() throws Exception {
diff --git
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/MetaStoreTestUtils.java
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/MetaStoreTestUtils.java
index 11b7e8c75ce..8bb2b206aea 100644
---
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/MetaStoreTestUtils.java
+++
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/MetaStoreTestUtils.java
@@ -43,6 +43,7 @@
import org.apache.hadoop.hive.metastore.security.HadoopThriftAuthBridge;
import org.apache.hadoop.hive.metastore.utils.TestTxnDbUtil;
import org.apache.hadoop.hive.metastore.utils.MetaStoreServerUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.thrift.TException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -272,7 +273,7 @@ private static void loopUntilZKReady(Configuration conf, String msHost, int port
} else {
uri = InetAddress.getLocalHost().getHostName();
}
- uri = uri + ":" + port;
+ uri = IPStackUtils.concatHostPort(uri, port);
int retries = 0;
while (true) {
try {
diff --git
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
index a23cc485463..56d274028c6 100644
---
a/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
+++
b/standalone-metastore/metastore-server/src/test/java/org/apache/hadoop/hive/metastore/TestHiveMetaStoreTimeout.java
@@ -32,7 +32,7 @@
import org.apache.hadoop.hive.metastore.client.builder.DatabaseBuilder;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf.ConfVars;
-import org.apache.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.thrift.TException;
import org.apache.thrift.transport.TTransportException;
import org.junit.After;