This is an automated email from the ASF dual-hosted git repository.
dengzh pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new 6c7e9722479 HIVE-29138: Authentication failure when connecting to SASL enforced ZooKeeper (#6024)
6c7e9722479 is described below
commit 6c7e972247957976fd12e9bbc4fb9343ffa78a71
Author: dengzh <[email protected]>
AuthorDate: Tue Sep 2 09:41:00 2025 +0800
HIVE-29138: Authentication failure when connecting to SASL enforced ZooKeeper (#6024)
---
.../java/org/apache/hive/minikdc/MiniHiveKdc.java | 4 +-
.../minikdc/TestZooKeeperHS2HAWithMiniKdc.java | 90 +++++++++++
.../hive/minikdc/TestZooKeeperWithMiniKdc.java | 173 +++++++++++++++++++++
.../hive-minikdc/src/test/resources/zk_jaas.conf | 8 +
jdbc/pom.xml | 2 +-
.../hive/jdbc/ZooKeeperHiveClientHelper.java | 35 +++--
.../hadoop/hive/registry/impl/ZkRegistryBase.java | 23 ++-
.../hadoop/hive/registry/impl/ZookeeperUtils.java | 134 ----------------
.../hive/registry/impl/TestZookeeperUtils.java | 104 -------------
.../service/server/HS2ActivePassiveHARegistry.java | 2 +-
.../apache/hive/service/server/HiveServer2.java | 32 ++--
.../service/server/KillQueryZookeeperManager.java | 14 +-
.../java/org/apache/hadoop/hive/shims/Utils.java | 75 ---------
.../client/ThriftHiveMetaStoreClient.java | 9 ++
.../hadoop/hive/metastore/utils/SecurityUtils.java | 66 +++++++-
.../hadoop/hive/metastore/HiveMetaStore.java | 18 +--
.../metastore/security/ZooKeeperTokenStore.java | 43 -----
17 files changed, 406 insertions(+), 426 deletions(-)
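The patch makes the Hive clients install the ZooKeeper SASL (Kerberos) JAAS configuration themselves whenever the connection indicates Kerberos, so that service discovery against a ZooKeeper ensemble that enforces SASL no longer fails to authenticate. As a rough illustration of the client-side behaviour the new tests exercise (host, port and principal below are placeholders, not values fixed by the patch):

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class SaslZkJdbcExample {
      public static void main(String[] args) throws Exception {
        // Discover HiveServer2 through a SASL-enforced ZooKeeper ensemble; the JDBC
        // driver now installs the ZooKeeper JAAS login context on its own, falling
        // back to the kinit'ed ticket cache when no keytab is available.
        String url = "jdbc:hive2://zk1.example.com:2181/default;"
            + "serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2;"
            + "principal=hive/[email protected]";
        try (Connection con = DriverManager.getConnection(url)) {
          System.out.println("connected: " + !con.isClosed());
        }
      }
    }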
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
index 83db75e2557..7c6169fce2f 100644
--- a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/MiniHiveKdc.java
@@ -53,8 +53,8 @@ public class MiniHiveKdc {
public static String AUTHENTICATION_TYPE = "KERBEROS";
private static final String HIVE_METASTORE_SERVICE_PRINCIPAL = "hive";
- private final MiniKdc miniKdc;
- private final File workDir;
+ final MiniKdc miniKdc;
+ final File workDir;
private final Map<String, String> userPrincipals =
new HashMap<String, String>();
private final Properties kdcConf = MiniKdc.createConf();
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestZooKeeperHS2HAWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestZooKeeperHS2HAWithMiniKdc.java
new file mode 100644
index 00000000000..dbad86aa08a
--- /dev/null
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestZooKeeperHS2HAWithMiniKdc.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.minikdc;
+
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.util.HashMap;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.security.ZooKeeperTokenStore;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.apache.zookeeper.client.ZooKeeperSaslClient;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.testcontainers.containers.GenericContainer;
+
+import static org.apache.hive.minikdc.TestZooKeeperWithMiniKdc.startZooKeeper;
+
+public class TestZooKeeperHS2HAWithMiniKdc {
+ private static final String ZK_PRINCIPAL = "zookeeper";
+ private static MiniHS2 miniHS2 = null;
+ private static MiniHiveKdc miniKDC;
+ private static HiveConf conf;
+ private static GenericContainer<?> zookeeper;
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ miniKDC = new MiniHiveKdc();
+ conf = new HiveConf();
+ miniKDC.addUserPrincipal(miniKDC.getServicePrincipalForUser(ZK_PRINCIPAL));
+ zookeeper = startZooKeeper(miniKDC, conf);
+ DriverManager.setLoginTimeout(0);
+ conf.set("hive.cluster.delegation.token.store.class",
ZooKeeperTokenStore.class.getName());
+ HiveConf.setBoolVar(conf,
HiveConf.ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY, true);
+ HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS,
true);
+ HiveConf.setBoolVar(conf,
HiveConf.ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_ENABLE, true);
+ HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY,
false);
+ HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_FETCH_TASK_CACHING,
false);
+ miniHS2 = MiniHiveKdc.getMiniHS2WithKerb(miniKDC, conf);
+ miniHS2.start(new HashMap<String, String>());
+ }
+
+ @Test
+ public void testJdbcConnection() throws Exception {
+ System.clearProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY);
+ String url = "jdbc:hive2://localhost:" + zookeeper.getMappedPort(2181) +
"/default;" +
+
"serviceDiscoveryMode=zooKeeperHA;zooKeeperNamespace=hs2ActivePassiveHA;principal=hive/[email protected]";
+ try (Connection con = DriverManager.getConnection(url);
+ ResultSet rs = con.getMetaData().getCatalogs()) {
+ Assert.assertFalse(rs.next());
+ ((HiveConnection) con).getDelegationToken("hive", "hive");
+ }
+ Assert.assertNotNull(System.getProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY));
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ try {
+ if (zookeeper != null) {
+ zookeeper.stop();
+ }
+ } finally {
+ miniKDC.shutDown();
+ if (miniHS2 != null && miniHS2.isStarted()) {
+ miniHS2.stop();
+ miniHS2.cleanup();
+ }
+ }
+ }
+}
diff --git a/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestZooKeeperWithMiniKdc.java b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestZooKeeperWithMiniKdc.java
new file mode 100644
index 00000000000..049e35010f8
--- /dev/null
+++ b/itests/hive-minikdc/src/test/java/org/apache/hive/minikdc/TestZooKeeperWithMiniKdc.java
@@ -0,0 +1,173 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hive.minikdc;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.net.URI;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+
+import org.apache.commons.lang3.tuple.Pair;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.security.ZooKeeperTokenStore;
+import org.apache.hive.jdbc.HiveConnection;
+import org.apache.hive.jdbc.miniHS2.MiniHS2;
+import org.apache.zookeeper.client.ZooKeeperSaslClient;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.testcontainers.containers.BindMode;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.wait.strategy.Wait;
+import org.testcontainers.utility.DockerImageName;
+
+public class TestZooKeeperWithMiniKdc {
+ private static final String ZK_PRINCIPAL = "zookeeper";
+ private static MiniHS2 miniHS2 = null;
+ private static MiniHiveKdc miniKDC;
+ private static HiveConf conf;
+ private static GenericContainer<?> zookeeper;
+
+ @BeforeClass
+ public static void setUp() throws Exception {
+ miniKDC = new MiniHiveKdc();
+ conf = new HiveConf();
+ miniKDC.addUserPrincipal(miniKDC.getServicePrincipalForUser(ZK_PRINCIPAL));
+ zookeeper = startZooKeeper(miniKDC, conf);
+ String hiveMetastorePrincipal =
+ miniKDC.getFullyQualifiedServicePrincipal(miniKDC.getHiveMetastoreServicePrincipal());
+ String hiveMetastoreKeytab = miniKDC.getKeyTabFile(
+ miniKDC.getServicePrincipalForUser(miniKDC.getHiveMetastoreServicePrincipal()));
+ MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.USE_THRIFT_SASL, true);
+ conf.set("hive.metastore.kerberos.principal", hiveMetastorePrincipal);
+ conf.set("hive.metastore.kerberos.keytab.file", hiveMetastoreKeytab);
+ conf.set("hive.cluster.delegation.token.store.class", ZooKeeperTokenStore.class.getName());
+ MetastoreConf.setBoolVar(conf, MetastoreConf.ConfVars.EXECUTE_SET_UGI, false);
+ MetastoreConf.setVar(conf, MetastoreConf.ConfVars.THRIFT_SERVICE_DISCOVERY_MODE, "zookeeper");
+ MetastoreConf.setVar(conf, MetastoreConf.ConfVars.THRIFT_BIND_HOST, "localhost");
+
+ DriverManager.setLoginTimeout(0);
+ HiveConf.setVar(conf, HiveConf.ConfVars.METASTORE_URIS, "localhost:" + zookeeper.getMappedPort(2181));
+ HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SERVER2_SUPPORT_DYNAMIC_SERVICE_DISCOVERY, true);
+ HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS, true);
+ HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
+ HiveConf.setBoolVar(conf, HiveConf.ConfVars.HIVE_FETCH_TASK_CACHING, false);
+ miniHS2 = MiniHiveKdc.getMiniHS2WithKerbWithRemoteHMSWithKerb(miniKDC, conf);
+ miniHS2.start(new HashMap<String, String>());
+ }
+
+ @Test
+ public void testMetaStoreClient() throws Exception {
+ System.clearProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY);
+ Assert.assertEquals("localhost:" + zookeeper.getMappedPort(2181),
+ MetastoreConf.getVar(conf, MetastoreConf.ConfVars.THRIFT_URIS));
+ try (HiveMetaStoreClient client = new HiveMetaStoreClient(conf)) {
+ URI[] uris = client.getThriftClient().getMetastoreUris();
+ Assert.assertEquals(1, uris.length);
+ Assert.assertEquals(miniHS2.getHmsPort(), uris[0].getPort());
+ client.addMasterKey("adbcedfghigklmn");
+ }
+ Assert.assertNotNull(System.getProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY));
+ }
+
+ @Test
+ public void testJdbcConnection() throws Exception {
+ System.clearProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY);
+ String url = "jdbc:hive2://localhost:" + zookeeper.getMappedPort(2181) +
"/default;" +
+
"serviceDiscoveryMode=zooKeeper;zooKeeperNamespace=hiveserver2;principal=hive/[email protected]";
+ try (Connection con = DriverManager.getConnection(url);
+ ResultSet rs = con.getMetaData().getCatalogs()) {
+ Assert.assertFalse(rs.next());
+ ((HiveConnection) con).getDelegationToken("hive", "hive");
+ }
+ Assert.assertNotNull(System.getProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY));
+ }
+
+ static GenericContainer<?>
+ startZooKeeper(MiniHiveKdc miniKDC, HiveConf conf) throws Exception {
+ Pair<File, int[]> krb5Conf = forkNewKrbConf(miniKDC);
+ GenericContainer<?> zookeeper = new GenericContainer<>(DockerImageName.parse("zookeeper:3.8.4"))
+ .withExposedPorts(2181)
+ .waitingFor(Wait.forLogMessage(".*binding to port.*2181.*\\n", 1))
+ .withEnv("JVMFLAGS", "-Djava.security.auth.login.config=/conf/jaas.conf")
+ .withEnv("ZOO_CFG_EXTRA", "authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider" +
+ " sessionRequireClientSASLAuth=true")
+ .withFileSystemBind(miniKDC.getKeyTabFile(miniKDC.getServicePrincipalForUser(ZK_PRINCIPAL)),
+ "/conf/zookeeper.keytab", BindMode.READ_ONLY)
+ .withFileSystemBind(TestZooKeeperWithMiniKdc.class.getClassLoader().getResource("zk_jaas.conf").getPath(),
+ "/conf/jaas.conf", BindMode.READ_ONLY)
+ .withFileSystemBind(krb5Conf.getLeft().getPath(), "/etc/krb5.conf", BindMode.READ_ONLY);
+ if (krb5Conf.getRight().length > 0) {
+ org.testcontainers.Testcontainers.exposeHostPorts(krb5Conf.getRight());
+ }
+ zookeeper.start();
+ HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM, "localhost:" + zookeeper.getMappedPort(2181));
+ return zookeeper;
+ }
+
+ private static Pair<File, int[]> forkNewKrbConf(MiniHiveKdc miniKDC) throws Exception {
+ File krb5 = miniKDC.miniKdc.getKrb5conf();
+ File newKrb5 = new File(miniKDC.workDir, krb5.getName() + "_new");
+ List<Integer> hostPorts = new ArrayList<>();
+ try (BufferedReader reader = new BufferedReader(new FileReader(krb5));
+ FileWriter writer = new FileWriter(newKrb5, false)) {
+ String line;
+ String localhost = "localhost:";
+ while ((line = reader.readLine()) != null) {
+ if (line.contains(localhost)) {
+ hostPorts.add(Integer.valueOf(line.split(localhost)[1]));
+ line = line.replace("localhost", "host.testcontainers.internal");
+ }
+ writer.write(line);
+ writer.write(System.lineSeparator());
+ }
+ writer.flush();
+ }
+ int[] ports = new int[hostPorts.size()];
+ for (int i = 0; i < hostPorts.size(); i++) {
+ ports[i] = hostPorts.get(i);
+ }
+ return Pair.of(newKrb5, ports);
+ }
+
+ @AfterClass
+ public static void tearDown() throws Exception {
+ try {
+ if (zookeeper != null) {
+ zookeeper.stop();
+ }
+ } finally {
+ miniKDC.shutDown();
+ if (miniHS2 != null && miniHS2.isStarted()) {
+ miniHS2.stop();
+ miniHS2.cleanup();
+ }
+ }
+ }
+}
diff --git a/itests/hive-minikdc/src/test/resources/zk_jaas.conf b/itests/hive-minikdc/src/test/resources/zk_jaas.conf
new file mode 100644
index 00000000000..47d8ad3d154
--- /dev/null
+++ b/itests/hive-minikdc/src/test/resources/zk_jaas.conf
@@ -0,0 +1,8 @@
+ Server {
+ com.sun.security.auth.module.Krb5LoginModule required
+ useKeyTab=true
+ keyTab="/conf/zookeeper.keytab"
+ storeKey=true
+ useTicketCache=false
+ principal="zookeeper/[email protected]";
+ };
\ No newline at end of file
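The Server section above is the JAAS login for the containerized ZooKeeper used by the tests; the matching client-side entry is no longer read from a file but installed programmatically. A minimal sketch of doing that directly with the relocated helper, assuming an illustrative principal and keytab path:

    import org.apache.hadoop.hive.metastore.utils.SecurityUtils;

    public class ZkClientJaasSetup {
      public static void main(String[] args) throws Exception {
        // Installs the default "HiveZooKeeperClient" JAAS context for ZooKeeper SASL.
        // Passing nulls instead (as the JDBC driver now does) makes the helper fall
        // back to the login user's keytab or ticket cache.
        SecurityUtils.setZookeeperClientKerberosJaasConfig(
            "hive/[email protected]", "/etc/security/keytabs/hive.service.keytab");
      }
    }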
diff --git a/jdbc/pom.xml b/jdbc/pom.xml
index 86e1e5f6295..d5c05c4c384 100644
--- a/jdbc/pom.xml
+++ b/jdbc/pom.xml
@@ -241,7 +241,7 @@
<include>org/apache/hadoop/security/token/**</include>
<include>org/apache/hadoop/util/*</include>
<include>org/apache/hadoop/net/*</include>
- <include>org/apache/hadoop/io/*</include>
+ <include>org/apache/hadoop/io/**</include>
<include>org/apache/hadoop/HadoopIllegalArgumentException.class</include>
</includes>
</filter>
diff --git a/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java b/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java
index bc06ae39cca..6ab722b7371 100644
--- a/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java
+++ b/jdbc/src/java/org/apache/hive/jdbc/ZooKeeperHiveClientHelper.java
@@ -35,6 +35,7 @@
import org.apache.hadoop.hive.common.SSLZookeeperFactory;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.common.IPStackUtils;
+import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
import org.apache.hive.jdbc.Utils.JdbcConnectionParams;
import org.apache.hive.service.server.HS2ActivePassiveHARegistry;
import org.apache.hive.service.server.HS2ActivePassiveHARegistryClient;
@@ -70,8 +71,7 @@ private static String getZooKeeperNamespace(JdbcConnectionParams connParams) {
*/
public static boolean isZkHADynamicDiscoveryMode(Map<String, String> sessionConf) {
final String discoveryMode = sessionConf.get(JdbcConnectionParams.SERVICE_DISCOVERY_MODE);
- return (discoveryMode != null) &&
- JdbcConnectionParams.SERVICE_DISCOVERY_MODE_ZOOKEEPER_HA.equalsIgnoreCase(discoveryMode);
+ return JdbcConnectionParams.SERVICE_DISCOVERY_MODE_ZOOKEEPER_HA.equalsIgnoreCase(discoveryMode);
}
/**
@@ -82,9 +82,14 @@ public static boolean isZkHADynamicDiscoveryMode(Map<String, String> sessionConf
*/
public static boolean isZkDynamicDiscoveryMode(Map<String, String> sessionConf) {
final String discoveryMode = sessionConf.get(JdbcConnectionParams.SERVICE_DISCOVERY_MODE);
- return (discoveryMode != null)
- && (JdbcConnectionParams.SERVICE_DISCOVERY_MODE_ZOOKEEPER.equalsIgnoreCase(discoveryMode) ||
- JdbcConnectionParams.SERVICE_DISCOVERY_MODE_ZOOKEEPER_HA.equalsIgnoreCase(discoveryMode));
+ return JdbcConnectionParams.SERVICE_DISCOVERY_MODE_ZOOKEEPER.equalsIgnoreCase(discoveryMode) ||
+ JdbcConnectionParams.SERVICE_DISCOVERY_MODE_ZOOKEEPER_HA.equalsIgnoreCase(discoveryMode);
+ }
+
+ static boolean isZkEnforceSASLClient(Map<String, String> sessionVars) {
+ HiveConf.ConfVars confVars = HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS;
+ return sessionVars.containsKey(JdbcConnectionParams.AUTH_PRINCIPAL) &&
+ Boolean.parseBoolean(sessionVars.getOrDefault(confVars.varname, confVars.getDefaultValue()));
}
/**
@@ -128,6 +133,11 @@ private static CuratorFramework getZkClient(JdbcConnectionParams connParams) thr
connParams.getZookeeperTrustStoreLocation(),
connParams.getZookeeperTrustStorePassword(),
connParams.getZookeeperTrustStoreType()))
.build();
+
+ // If the client is requesting the Kerberos, then the ZooKeeper is mostly Kerberos-secured
+ if (isZkEnforceSASLClient(connParams.getSessionVars())) {
+ SecurityUtils.setZookeeperClientKerberosJaasConfig(null, null);
+ }
zooKeeperClient.start();
return zooKeeperClient;
}
@@ -195,38 +205,31 @@ static void configureConnParams(JdbcConnectionParams connParams) throws ZooKeepe
if (isZkHADynamicDiscoveryMode(connParams.getSessionVars())) {
configureConnParamsHA(connParams);
} else {
- CuratorFramework zooKeeperClient = null;
- try {
- zooKeeperClient = getZkClient(connParams);
-
+ try (CuratorFramework zooKeeperClient = getZkClient(connParams)) {
final List<String> serverHosts = getServerHosts(connParams, zooKeeperClient);
-
if (serverHosts.isEmpty()) {
throw new ZooKeeperHiveClientException("No more HiveServer2 URIs from ZooKeeper to attempt");
}
-
// Pick a server node randomly
final String serverNode =
serverHosts.get(ThreadLocalRandom.current().nextInt(serverHosts.size()));
-
updateParamsWithZKServerNode(connParams, zooKeeperClient, serverNode);
} catch (ZooKeeperHiveClientException zkhce) {
throw zkhce;
} catch (Exception e) {
throw new ZooKeeperHiveClientException("Unable to read HiveServer2
configs from ZooKeeper", e);
- } finally {
- if (zooKeeperClient != null) {
- zooKeeperClient.close();
- }
}
}
}
private static void configureConnParamsHA(JdbcConnectionParams connParams)
throws ZooKeeperHiveClientException {
try {
+
Configuration registryConf = new Configuration();
registryConf.set(HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM.varname,
connParams.getZooKeeperEnsemble());
registryConf.set(HiveConf.ConfVars.HIVE_SERVER2_ACTIVE_PASSIVE_HA_REGISTRY_NAMESPACE.varname,
getZooKeeperNamespace(connParams));
+ registryConf.setBoolean(HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS.varname,
+ isZkEnforceSASLClient(connParams.getSessionVars()));
HS2ActivePassiveHARegistry haRegistryClient =
HS2ActivePassiveHARegistryClient.getClient(registryConf);
boolean foundLeader = false;
String maxRetriesConf =
connParams.getSessionVars().get(JdbcConnectionParams.RETRIES);
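The driver decides whether to set up SASL purely from the connection parameters: a Kerberos principal must be present and hive.zookeeper.kerberos.enabled (default true) must not be switched off in the URL's session variables. A small standalone sketch of that predicate, with a plain Map standing in for JdbcConnectionParams:

    import java.util.Map;

    public class ZkSaslDecisionSketch {
      // Mirrors isZkEnforceSASLClient: principal present and the ZooKeeper
      // Kerberos flag not explicitly disabled.
      static boolean shouldUseSaslForZooKeeper(Map<String, String> sessionVars) {
        boolean hasPrincipal = sessionVars.containsKey("principal");
        boolean zkKerberos = Boolean.parseBoolean(
            sessionVars.getOrDefault("hive.zookeeper.kerberos.enabled", "true"));
        return hasPrincipal && zkKerberos;
      }
    }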
diff --git a/llap-client/src/java/org/apache/hadoop/hive/registry/impl/ZkRegistryBase.java b/llap-client/src/java/org/apache/hadoop/hive/registry/impl/ZkRegistryBase.java
index f6a52cd3b2d..6e6ce31d6ff 100644
--- a/llap-client/src/java/org/apache/hadoop/hive/registry/impl/ZkRegistryBase.java
+++ b/llap-client/src/java/org/apache/hadoop/hive/registry/impl/ZkRegistryBase.java
@@ -47,6 +47,7 @@
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
import org.apache.hadoop.hive.llap.LlapUtil;
+import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
import org.apache.hadoop.hive.registry.RegistryUtilities;
import org.apache.hadoop.hive.registry.ServiceInstance;
import org.apache.hadoop.hive.registry.ServiceInstanceStateChangeListener;
@@ -187,7 +188,7 @@ public ZkRegistryBase(String instanceName, Configuration conf, String rootNs, St
public static String getRootNamespace(Configuration conf, String userProvidedNamespace,
String defaultNamespacePrefix) {
- final boolean isSecure = ZookeeperUtils.isKerberosEnabled(conf);
+ final boolean isSecure = isZkEnforceSASLClient(conf);
String rootNs = userProvidedNamespace;
if (rootNs == null) {
rootNs = defaultNamespacePrefix + (isSecure ? SASL_NAMESPACE : UNSECURE_NAMESPACE);
@@ -196,7 +197,7 @@ public static String getRootNamespace(Configuration conf, String userProvidedNam
}
private ACLProvider getACLProviderForZKPath(String zkPath) {
- final boolean isSecure = ZookeeperUtils.isKerberosEnabled(conf);
+ final boolean isSecure = isZkEnforceSASLClient(conf);
return new ACLProvider() {
@Override
public List<ACL> getDefaultAcl() {
@@ -405,7 +406,7 @@ final protected void initializeWithoutRegisteringInternal() throws IOException {
}
private void checkAndSetAcls() throws Exception {
- if (!ZookeeperUtils.isKerberosEnabled(conf)) {
+ if (!isZkEnforceSASLClient(conf)) {
return;
}
// We are trying to check ACLs on the "workers" directory, which noone except us should be
@@ -670,10 +671,13 @@ protected final synchronized PathChildrenCache ensureInstancesCache(
public void start() throws IOException {
if (zooKeeperClient != null) {
- String principal = ZookeeperUtils.setupZookeeperAuth(
- conf, saslLoginContextName, zkPrincipal, zkKeytab);
- if (principal != null) {
- userNameFromPrincipal = LlapUtil.getUserNameFromPrincipal(principal);
+ if (isZkEnforceSASLClient(conf)) {
+ if (saslLoginContextName != null) {
+ SecurityUtils.setZookeeperClientKerberosJaasConfig(zkPrincipal, zkKeytab, saslLoginContextName);
+ }
+ if (zkPrincipal != null) {
+ userNameFromPrincipal = LlapUtil.getUserNameFromPrincipal(zkPrincipal);
+ }
}
zooKeeperClient.start();
}
@@ -681,6 +685,11 @@ public void start() throws IOException {
CloseableUtils.class.getName();
}
+ private static boolean isZkEnforceSASLClient(Configuration conf) {
+ return UserGroupInformation.isSecurityEnabled() &&
+ HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS);
+ }
+
protected void unregisterInternal() {
CloseableUtils.closeQuietly(znode);
}
diff --git a/llap-client/src/java/org/apache/hadoop/hive/registry/impl/ZookeeperUtils.java b/llap-client/src/java/org/apache/hadoop/hive/registry/impl/ZookeeperUtils.java
deleted file mode 100644
index c3d34c486dc..00000000000
--- a/llap-client/src/java/org/apache/hadoop/hive/registry/impl/ZookeeperUtils.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/*
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.registry.impl;
-
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
-import javax.security.auth.login.AppConfigurationEntry;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.SecurityUtil;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authentication.util.KerberosUtil;
-import org.apache.zookeeper.client.ZooKeeperSaslClient;
-
-public class ZookeeperUtils {
- private static final Logger LOG =
LoggerFactory.getLogger(ZookeeperUtils.class);
-
- public static String setupZookeeperAuth(Configuration conf, String
saslLoginContextName,
- String zkPrincipal, String zkKeytab) throws IOException {
- // If the login context name is not set, we are in the client and don't
need auth.
- if (isKerberosEnabled(conf) && saslLoginContextName != null) {
- LOG.info("UGI security is enabled. Setting up ZK auth.");
-
- if (zkPrincipal == null || zkPrincipal.isEmpty()) {
- throw new IOException("Kerberos principal is empty");
- }
-
- if (zkKeytab == null || zkKeytab.isEmpty()) {
- throw new IOException("Kerberos keytab is empty");
- }
-
- // Install the JAAS Configuration for the runtime
- return setZookeeperClientKerberosJaasConfig(saslLoginContextName,
zkPrincipal, zkKeytab);
- } else {
- LOG.info("UGI security is not enabled, or no SASL context name. " +
- "Skipping setting up ZK auth.");
- return null;
- }
- }
-
- /**
- * Check if Kerberos authentication is enabled.
- * This is used by:
- * - LLAP daemons
- * - Tez AM
- * - HS2
- * - LLAP status service
- * Among the these Tez AM process has the lowest security setting wrt
Kerberos in UGI.
- * Even in secure scenarios Tez AM will return false for
- * UGI.getLoginUser().hasKerberosCredentials() as it does not log in using
Kerberos.
- * Hence UGI.isSecurityEnabled() is the tightest setting we can check
against.
- */
- public static boolean isKerberosEnabled(Configuration conf) {
- return UserGroupInformation.isSecurityEnabled() &&
- HiveConf.getBoolVar(conf,
HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS);
- }
-
- /**
- * Dynamically sets up the JAAS configuration that uses kerberos.
- *
- * @param zkPrincipal
- * @param zkKeytab
- * @throws IOException
- */
- private static String setZookeeperClientKerberosJaasConfig(
- String saslLoginContextName, String zkPrincipal, String zkKeytab) throws
IOException {
- // ZooKeeper property name to pick the correct JAAS conf section
- System.setProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY,
saslLoginContextName);
-
- String principal = SecurityUtil.getServerPrincipal(zkPrincipal, "0.0.0.0");
- JaasConfiguration jaasConf = new JaasConfiguration(
- saslLoginContextName, principal, zkKeytab);
-
- // Install the Configuration in the runtime.
- javax.security.auth.login.Configuration.setConfiguration(jaasConf);
- return principal;
- }
-
- /**
- * A JAAS configuration for ZooKeeper clients intended to use for SASL
- * Kerberos.
- */
- private static class JaasConfiguration extends
javax.security.auth.login.Configuration {
- // Current installed Configuration
- private final javax.security.auth.login.Configuration baseConfig =
javax.security.auth.login.Configuration
- .getConfiguration();
- private final String loginContextName;
- private final String principal;
- private final String keyTabFile;
-
- public JaasConfiguration(String loginContextName, String principal, String
keyTabFile) {
- this.loginContextName = loginContextName;
- this.principal = principal;
- this.keyTabFile = keyTabFile;
- }
-
- @Override
- public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
- if (loginContextName.equals(appName)) {
- Map<String, String> krbOptions = new HashMap<String, String>();
- krbOptions.put("doNotPrompt", "true");
- krbOptions.put("storeKey", "true");
- krbOptions.put("useKeyTab", "true");
- krbOptions.put("principal", principal);
- krbOptions.put("keyTab", keyTabFile);
- krbOptions.put("refreshKrb5Config", "true");
- AppConfigurationEntry zooKeeperClientEntry = new AppConfigurationEntry(
- KerberosUtil.getKrb5LoginModuleName(),
- AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, krbOptions);
- return new AppConfigurationEntry[] { zooKeeperClientEntry };
- }
- // Try the base config
- if (baseConfig != null) {
- return baseConfig.getAppConfigurationEntry(appName);
- }
- return null;
- }
- }
-}
diff --git a/llap-client/src/test/org/apache/hadoop/hive/registry/impl/TestZookeeperUtils.java b/llap-client/src/test/org/apache/hadoop/hive/registry/impl/TestZookeeperUtils.java
deleted file mode 100644
index 25d427abacf..00000000000
--- a/llap-client/src/test/org/apache/hadoop/hive/registry/impl/TestZookeeperUtils.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.registry.impl;
-
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.MockedStatic;
-import org.mockito.Mockito;
-import org.mockito.junit.MockitoJUnitRunner;
-
-/**
- * ZookeeperUtils test suite.
- */
-@RunWith(MockitoJUnitRunner.class)
-public class TestZookeeperUtils {
-
- private Configuration conf;
- private UserGroupInformation ugi;
-
- MockedStatic<UserGroupInformation> userGroupInformationMockedStatic;
-
- @Before
- public void setup() {
- conf = new Configuration();
- userGroupInformationMockedStatic =
Mockito.mockStatic(UserGroupInformation.class);
-
userGroupInformationMockedStatic.when(UserGroupInformation::isSecurityEnabled).thenReturn(true);
- ugi = Mockito.mock(UserGroupInformation.class);
- UserGroupInformation.setLoginUser(ugi);
- }
-
- @After
- public void teardown() {
- userGroupInformationMockedStatic.close();
- }
-
- /**
- * Secure scenario, invoked e.g. from within HS2 or LLAP daemon process,
kinit'ed inside proc.
- */
- @Test
- public void testHadoopAuthKerberosFromKeytabAndZookeeperUseKerberos() {
- Assert.assertTrue(HiveConf.getBoolVar(conf,
HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS));
- Assert.assertTrue(ZookeeperUtils.isKerberosEnabled(conf));
- }
-
- /**
- * Secure scenario, invoked e.g. from within HS2 or LLAP status process,
kinit'ed in parent proc.
- */
- @Test
- public void testHadoopAuthKerberosFromTicketAndZookeeperUseKerberos() {
- Assert.assertTrue(HiveConf.getBoolVar(conf,
HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS));
- Assert.assertTrue(ZookeeperUtils.isKerberosEnabled(conf));
- }
-
- /**
- * Secure scenario, invoked e.g. from within Tez AM process.
- */
- @Test
- public void testHadoopAuthKerberosNoLoginAndZookeeperUseKerberos() {
- Assert.assertTrue(HiveConf.getBoolVar(conf,
HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS));
- Assert.assertTrue(ZookeeperUtils.isKerberosEnabled(conf));
- }
-
- /**
- * Unsecure scenario.
- */
- @Test
- public void testHadoopAuthSimpleAndZookeeperUseKerberos() {
-
userGroupInformationMockedStatic.when(UserGroupInformation::isSecurityEnabled).thenReturn(false);
- Assert.assertTrue(HiveConf.getBoolVar(conf,
HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS));
- Assert.assertFalse(ZookeeperUtils.isKerberosEnabled(conf));
- }
-
- /**
- * Secure scenario with hive.zookeeper.kerberos.enabled=false.
- */
- @Test
- public void testHadoopAuthKerberosAndZookeeperNoKerberos(){
- conf.setBoolean(HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS.varname,
false);
- Assert.assertFalse(ZookeeperUtils.isKerberosEnabled(conf));
- }
-
-}
diff --git a/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java b/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java
index d355724401c..cc783d5d04d 100644
--- a/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java
+++ b/service/src/java/org/apache/hive/service/server/HS2ActivePassiveHARegistry.java
@@ -83,7 +83,7 @@ static HS2ActivePassiveHARegistry create(Configuration conf, boolean isClient) {
String keytab = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
String zkNameSpacePrefix = zkNameSpace + "-";
return new HS2ActivePassiveHARegistry(null, zkNameSpacePrefix, LEADER_LATCH_PATH, principal, keytab,
- isClient ? null : SASL_LOGIN_CONTEXT_NAME, conf, isClient);
+ isClient ? SASL_LOGIN_CONTEXT_NAME : null, conf, isClient);
}
private HS2ActivePassiveHARegistry(final String instanceName, final String zkNamespacePrefix,
diff --git a/service/src/java/org/apache/hive/service/server/HiveServer2.java b/service/src/java/org/apache/hive/service/server/HiveServer2.java
index 990beadc88f..048033cbcf3 100644
--- a/service/src/java/org/apache/hive/service/server/HiveServer2.java
+++ b/service/src/java/org/apache/hive/service/server/HiveServer2.java
@@ -72,6 +72,7 @@
import org.apache.hadoop.hive.metastore.api.WMPool;
import org.apache.hadoop.hive.metastore.api.WMResourcePlan;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
import org.apache.hadoop.hive.ql.ServiceContext;
import org.apache.hadoop.hive.ql.cache.results.QueryResultsCache;
import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager;
@@ -96,9 +97,7 @@
import org.apache.hadoop.hive.ql.txn.compactor.CompactorThread;
import org.apache.hadoop.hive.ql.txn.compactor.CompactorUtil;
import org.apache.hadoop.hive.ql.txn.compactor.Worker;
-import org.apache.hadoop.hive.registry.impl.ZookeeperUtils;
import org.apache.hadoop.hive.shims.ShimLoader;
-import org.apache.hadoop.hive.shims.Utils;
import org.apache.hadoop.hive.common.IPStackUtils;
import org.apache.hadoop.util.ExitUtil;
import org.apache.hive.common.util.HiveStringUtils;
@@ -270,7 +269,7 @@ public synchronized void init(HiveConf hiveConf) {
} catch (Throwable t) {
LOG.warn("Could not initiate the HiveServer2 Metrics system. Metrics
may not be reported.", t);
}
-
+ setUpZooKeeperAuth(hiveConf);
// Do not allow sessions - leader election or initialization will allow
them for an active HS2.
cliService = new CLIService(this, false);
addService(cliService);
@@ -608,7 +607,9 @@ public static boolean isAllTransportMode(HiveConf hiveConf) {
private ACLProvider zooKeeperAclProvider;
private ACLProvider getACLProvider(HiveConf hiveConf) {
- final boolean isSecure = ZookeeperUtils.isKerberosEnabled(hiveConf);
+ final boolean isSecure =
+ AuthType.isKerberosAuthMode(hiveConf) &&
+ HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS);
return new ACLProvider() {
@Override
@@ -682,18 +683,18 @@ private void addConfsToPublish(HiveConf hiveConf, Map<String, String> confsToPub
* @return
* @throws Exception
*/
- private static void setUpZooKeeperAuth(HiveConf hiveConf) throws Exception {
- if (ZookeeperUtils.isKerberosEnabled(hiveConf)) {
- String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
- if (principal.isEmpty()) {
- throw new IOException("HiveServer2 Kerberos principal is empty");
- }
- String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
- if (keyTabFile.isEmpty()) {
- throw new IOException("HiveServer2 Kerberos keytab is empty");
+ private static void setUpZooKeeperAuth(HiveConf hiveConf) {
+ try {
+ if (AuthType.isKerberosAuthMode(hiveConf) &&
+ StringUtils.isNotEmpty(HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVE_ZOOKEEPER_QUORUM)) &&
+ HiveConf.getBoolVar(hiveConf, HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS)) {
+ // Install the JAAS Configuration for the runtime
+ String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
+ String keyTab = hiveConf.getVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
+ SecurityUtils.setZookeeperClientKerberosJaasConfig(principal, keyTab);
}
- // Install the JAAS Configuration for the runtime
- Utils.setZookeeperClientKerberosJaasConfig(principal, keyTabFile);
+ } catch (Exception e) {
+ throw new RuntimeException("Failed to configure the jaas for ZooKeeper", e);
}
}
@@ -1186,7 +1187,6 @@ public void startPrivilegeSynchronizer(HiveConf hiveConf) throws Exception {
}
if (policyContainer.size() > 0) {
- setUpZooKeeperAuth(hiveConf);
zKClientForPrivSync =
hiveConf.getZKConfig().startZookeeperClient(zooKeeperAclProvider, true);
String rootNamespace =
hiveConf.getVar(HiveConf.ConfVars.HIVE_SERVER2_ZOOKEEPER_NAMESPACE);
String path = ZooKeeperHiveHelper.ZOOKEEPER_PATH_SEPARATOR +
rootNamespace
diff --git a/service/src/java/org/apache/hive/service/server/KillQueryZookeeperManager.java b/service/src/java/org/apache/hive/service/server/KillQueryZookeeperManager.java
index 396364bf79f..36f2c54a9a3 100644
--- a/service/src/java/org/apache/hive/service/server/KillQueryZookeeperManager.java
+++ b/service/src/java/org/apache/hive/service/server/KillQueryZookeeperManager.java
@@ -31,7 +31,7 @@
import org.apache.curator.utils.PathUtils;
import org.apache.curator.utils.ZKPaths;
import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.registry.impl.ZookeeperUtils;
+import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hive.service.AbstractService;
import org.apache.hive.service.ServiceException;
import org.apache.hive.service.cli.operation.OperationManager;
@@ -61,12 +61,11 @@
public class KillQueryZookeeperManager extends AbstractService {
private static final Logger LOG =
LoggerFactory.getLogger(KillQueryZookeeperManager.class);
- private static final String SASL_LOGIN_CONTEXT_NAME = "KillQueryZooKeeperClient";
public static final int MAX_WAIT_ON_CONFIRMATION_SECONDS = 30;
public static final int MAX_WAIT_ON_KILL_SECONDS = 180;
private CuratorFramework zooKeeperClient;
- private String zkPrincipal, zkKeytab, zkNameSpace;
+ private String zkNameSpace;
private final KillQueryImpl localKillQueryImpl;
private final HiveServer2 hiveServer2;
private HiveConf conf;
@@ -86,8 +85,6 @@ public synchronized void init(HiveConf conf) {
zkNameSpace = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_ZOOKEEPER_KILLQUERY_NAMESPACE);
Preconditions.checkArgument(!StringUtils.isBlank(zkNameSpace),
HiveConf.ConfVars.HIVE_ZOOKEEPER_KILLQUERY_NAMESPACE.varname + " cannot be null or empty");
- this.zkPrincipal = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL);
- this.zkKeytab = HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB);
this.zooKeeperClient =
conf.getZKConfig().getNewZookeeperClient(getACLProviderForZKPath("/" + zkNameSpace));
this.zooKeeperClient.getConnectionStateListenable().addListener(new ZkConnectionStateListener());
@@ -101,11 +98,11 @@ public synchronized void start() {
throw new ServiceException("Failed start zookeeperClient in
KillQueryZookeeperManager");
}
try {
- ZookeeperUtils.setupZookeeperAuth(this.getHiveConf(), SASL_LOGIN_CONTEXT_NAME, zkPrincipal, zkKeytab);
zooKeeperClient.start();
try {
zooKeeperClient.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath("/" + zkNameSpace);
- if (ZookeeperUtils.isKerberosEnabled(conf)) {
+ if (UserGroupInformation.isSecurityEnabled() &&
+ HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS)) {
zooKeeperClient.setACL().withACL(createSecureAcls()).forPath("/" + zkNameSpace);
LOG.info("Created the root namespace: " + zkNameSpace + " on ZooKeeper");
}
@@ -128,7 +125,8 @@ public synchronized void start() {
}
private ACLProvider getACLProviderForZKPath(String zkPath) {
- final boolean isSecure = ZookeeperUtils.isKerberosEnabled(conf);
+ final boolean isSecure = UserGroupInformation.isSecurityEnabled() &&
+ HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_ZOOKEEPER_USE_KERBEROS);
return new ACLProvider() {
@Override
public List<ACL> getDefaultAcl() {
diff --git a/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java b/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
index 12566d8d93b..515b10b35f0 100644
--- a/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
+++ b/shims/common/src/main/java/org/apache/hadoop/hive/shims/Utils.java
@@ -20,13 +20,9 @@
import java.io.IOException;
import java.util.Arrays;
-import java.util.HashMap;
import java.util.HashSet;
-import java.util.Map;
import java.util.Set;
-import javax.security.auth.login.AppConfigurationEntry;
-import javax.security.auth.login.AppConfigurationEntry.LoginModuleControlFlag;
import javax.security.auth.login.LoginException;
import javax.servlet.Filter;
import javax.servlet.FilterChain;
@@ -39,10 +35,7 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authentication.util.KerberosUtil;
-import org.apache.zookeeper.client.ZooKeeperSaslClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -50,8 +43,6 @@ public class Utils {
private static final Logger LOG = LoggerFactory.getLogger(Utils.class);
public static final String RAW_RESERVED_VIRTUAL_PATH = "/.reserved/raw/";
- private static final boolean IBM_JAVA = System.getProperty("java.vendor")
- .contains("IBM");
public static final String DISTCP_OPTIONS_PREFIX = "distcp.options.";
@@ -73,72 +64,6 @@ public static UserGroupInformation getUGI() throws LoginException, IOException {
return UserGroupInformation.getCurrentUser();
}
- /**
- * Dynamically sets up the JAAS configuration that uses kerberos
- * @param principal
- * @param keyTabFile
- * @throws IOException
- */
- public static void setZookeeperClientKerberosJaasConfig(String principal,
String keyTabFile) throws IOException {
- // ZooKeeper property name to pick the correct JAAS conf section
- final String SASL_LOGIN_CONTEXT_NAME = "HiveZooKeeperClient";
- System.setProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY,
SASL_LOGIN_CONTEXT_NAME);
-
- principal = SecurityUtil.getServerPrincipal(principal, "0.0.0.0");
- JaasConfiguration jaasConf = new
JaasConfiguration(SASL_LOGIN_CONTEXT_NAME, principal, keyTabFile);
-
- // Install the Configuration in the runtime.
- javax.security.auth.login.Configuration.setConfiguration(jaasConf);
- }
-
- /**
- * A JAAS configuration for ZooKeeper clients intended to use for SASL
- * Kerberos.
- */
- private static class JaasConfiguration extends
javax.security.auth.login.Configuration {
- // Current installed Configuration
- private static final boolean IBM_JAVA = System.getProperty("java.vendor")
- .contains("IBM");
- private final javax.security.auth.login.Configuration baseConfig =
javax.security.auth.login.Configuration
- .getConfiguration();
- private final String loginContextName;
- private final String principal;
- private final String keyTabFile;
-
- public JaasConfiguration(String hiveLoginContextName, String principal,
String keyTabFile) {
- this.loginContextName = hiveLoginContextName;
- this.principal = principal;
- this.keyTabFile = keyTabFile;
- }
-
- @Override
- public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
- if (loginContextName.equals(appName)) {
- Map<String, String> krbOptions = new HashMap<String, String>();
- if (IBM_JAVA) {
- krbOptions.put("credsType", "both");
- krbOptions.put("useKeytab", keyTabFile);
- } else {
- krbOptions.put("doNotPrompt", "true");
- krbOptions.put("storeKey", "true");
- krbOptions.put("useKeyTab", "true");
- krbOptions.put("keyTab", keyTabFile);
- }
- krbOptions.put("principal", principal);
- krbOptions.put("refreshKrb5Config", "true");
- AppConfigurationEntry hiveZooKeeperClientEntry = new
AppConfigurationEntry(
- KerberosUtil.getKrb5LoginModuleName(),
LoginModuleControlFlag.REQUIRED, krbOptions);
- return new AppConfigurationEntry[] { hiveZooKeeperClientEntry };
- }
- // Try the base config
- if (baseConfig != null) {
- return baseConfig.getAppConfigurationEntry(appName);
- }
- return null;
- }
- }
-
-
public static final String XSRF_CUSTOM_HEADER_PARAM = "custom-header";
public static final String XSRF_CUSTOM_METHODS_TO_IGNORE_PARAM =
"methods-to-ignore";
private static final String XSRF_HEADER_DEFAULT = "X-XSRF-HEADER";
diff --git a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/ThriftHiveMetaStoreClient.java b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/ThriftHiveMetaStoreClient.java
index 24a50145364..78f71e41bbb 100644
--- a/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/ThriftHiveMetaStoreClient.java
+++ b/standalone-metastore/metastore-client/src/main/java/org/apache/hadoop/hive/metastore/client/ThriftHiveMetaStoreClient.java
@@ -262,6 +262,10 @@ private void resolveUris() throws MetaException {
if (serviceDiscoveryMode == null || serviceDiscoveryMode.trim().isEmpty()) {
metastoreUrisString = Arrays.asList(thriftUris.split(","));
} else if (serviceDiscoveryMode.equalsIgnoreCase("zookeeper")) {
+ if (MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.USE_THRIFT_SASL) &&
+ MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.THRIFT_ZOOKEEPER_USE_KERBEROS)) {
+ SecurityUtils.setZookeeperClientKerberosJaasConfig(null, null);
+ }
metastoreUrisString = new ArrayList<String>();
// Add scheme to the bare URI we get.
for (String s : MetastoreConf.getZKConfig(conf).getServerUris()) {
@@ -3936,4 +3940,9 @@ public Map<String, Map<String, String>> getProperties(String nameSpace, String m
PropertyGetResponse response = client.get_properties(request);
return response.getProperties();
}
+
+ @VisibleForTesting
+ public URI[] getMetastoreUris() {
+ return metastoreUris;
+ }
}
diff --git a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/SecurityUtils.java b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/SecurityUtils.java
index ec593ae7a36..a4f7f87cfec 100644
--- a/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/SecurityUtils.java
+++ b/standalone-metastore/metastore-common/src/main/java/org/apache/hadoop/hive/metastore/utils/SecurityUtils.java
@@ -19,6 +19,7 @@
import com.google.common.base.Preconditions;
import java.io.FileInputStream;
+import java.lang.reflect.Method;
import java.security.KeyManagementException;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
@@ -74,6 +75,21 @@
public class SecurityUtils {
private static final Logger LOG =
LoggerFactory.getLogger(SecurityUtils.class);
+ private static Method getKeytab = null;
+
+ static {
+ Class<?> clz = UserGroupInformation.class;
+
+ try {
+ getKeytab = clz.getDeclaredMethod("getKeytab");
+ getKeytab.setAccessible(true);
+ } catch (NoSuchMethodException nme) {
+ LOG.warn("Cannot find private method \"getKeytab\" in class:" +
+ UserGroupInformation.class.getCanonicalName(), nme);
+ getKeytab = null;
+ }
+ }
+
public static UserGroupInformation getUGI() throws LoginException, IOException {
String doAs = System.getenv("HADOOP_USER_NAME");
if (doAs != null && doAs.length() > 0) {
@@ -87,15 +103,42 @@ public static UserGroupInformation getUGI() throws LoginException, IOException {
}
return UserGroupInformation.getCurrentUser();
}
+
/**
* Dynamically sets up the JAAS configuration that uses kerberos
* @param principal
* @param keyTabFile
* @throws IOException
*/
- public static void setZookeeperClientKerberosJaasConfig(String principal, String keyTabFile) throws IOException {
+ public static void setZookeeperClientKerberosJaasConfig(String principal, String keyTabFile,
+ String... loginContext) throws IOException {
// ZooKeeper property name to pick the correct JAAS conf section
- final String SASL_LOGIN_CONTEXT_NAME = "HiveZooKeeperClient";
+ String loginContextName = "HiveZooKeeperClient";
+ if (loginContext != null && loginContext.length > 0) {
+ loginContextName = loginContext[0];
+ }
+ final String SASL_LOGIN_CONTEXT_NAME = loginContextName;
+ if (System.getProperties().containsKey(SASL_LOGIN_CONTEXT_NAME)) {
+ LOG.info("ZooKeeper JAAS context: {} has been set, ignore...",
SASL_LOGIN_CONTEXT_NAME);
+ return;
+ }
+ // For client to authenticate with zk, where we don't know the exact principal.
+ if (StringUtils.isEmpty(principal) || StringUtils.isEmpty(keyTabFile)) {
+ UserGroupInformation loginUser = UserGroupInformation.getLoginUser();
+ try {
+ if (getKeytab != null && UserGroupInformation.isSecurityEnabled() &&
+ UserGroupInformation.isLoginKeytabBased()) {
+ principal = loginUser.getUserName();
+ keyTabFile = (String) getKeytab.invoke(loginUser);
+ } else if (System.getProperty("java.security.auth.login.config") != null) {
+ // The client provides the jaas configuration, use it
+ return;
+ }
+ } catch (Exception e) {
+ throw new IOException("Failed to get the principal or keyTab from
UGI", e);
+ }
+ }
+
System.setProperty(ZooKeeperSaslClient.LOGIN_CONTEXT_NAME_KEY, SASL_LOGIN_CONTEXT_NAME);
principal = SecurityUtil.getServerPrincipal(principal, "0.0.0.0");
@@ -131,14 +174,23 @@ public AppConfigurationEntry[] getAppConfigurationEntry(String appName) {
Map<String, String> krbOptions = new HashMap<String, String>();
if (IBM_JAVA) {
krbOptions.put("credsType", "both");
- krbOptions.put("useKeytab", keyTabFile);
+ if (keyTabFile != null) {
+ krbOptions.put("useKeytab", keyTabFile);
+ } else {
+ krbOptions.put("useDefaultCcache", "true");
+ }
} else {
krbOptions.put("doNotPrompt", "true");
- krbOptions.put("storeKey", "true");
- krbOptions.put("useKeyTab", "true");
- krbOptions.put("keyTab", keyTabFile);
+ if (keyTabFile != null) {
+ krbOptions.put("useKeyTab", "true");
+ krbOptions.put("keyTab", keyTabFile);
+ } else {
+ krbOptions.put("useTicketCache", "true");
+ }
+ }
+ if (principal != null) {
+ krbOptions.put("principal", principal);
}
- krbOptions.put("principal", principal);
krbOptions.put("refreshKrb5Config", "true");
AppConfigurationEntry hiveZooKeeperClientEntry = new AppConfigurationEntry(
KerberosUtil.getKrb5LoginModuleName(), LoginModuleControlFlag.REQUIRED, krbOptions);
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
index 180baef67bf..e0368373a60 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/HiveMetaStore.java
@@ -324,18 +324,6 @@ public static void main(String[] args) throws Throwable {
}
}
- /**
- * Start Metastore based on a passed {@link HadoopThriftAuthBridge}.
- *
- * @param port
- * @param bridge
- * @throws Throwable
- */
- public static void startMetaStore(int port, HadoopThriftAuthBridge bridge)
- throws Throwable {
- startMetaStore(port, bridge, MetastoreConf.newMetastoreConf(), false, null);
- }
-
/**
* Start the metastore store.
* @param port
@@ -685,6 +673,12 @@ private static void constraintHttpMethods(ServletContextHandler ctxHandler, bool
public static void startMetaStore(int port, HadoopThriftAuthBridge bridge,
Configuration conf, boolean startMetaStoreThreads, AtomicBoolean startedBackgroundThreads) throws Throwable {
isMetaStoreRemote = true;
+ if (MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.USE_THRIFT_SASL) &&
+ MetastoreConf.getBoolVar(conf, MetastoreConf.ConfVars.THRIFT_ZOOKEEPER_USE_KERBEROS)) {
+ String principal = MetastoreConf.getVar(conf, ConfVars.KERBEROS_PRINCIPAL);
+ String keyTab = MetastoreConf.getVar(conf, ConfVars.KERBEROS_KEYTAB_FILE);
+ SecurityUtils.setZookeeperClientKerberosJaasConfig(principal, keyTab);
+ }
String transportMode = MetastoreConf.getVar(conf, ConfVars.THRIFT_TRANSPORT_MODE, "binary");
boolean isHttpTransport = transportMode.equalsIgnoreCase("http");
if (isHttpTransport) {
diff --git a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStore.java b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStore.java
index 2603771784a..88dc761852e 100644
--- a/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStore.java
+++ b/standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/security/ZooKeeperTokenStore.java
@@ -33,7 +33,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.common.ZooKeeperHiveHelper;
import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
-import org.apache.hadoop.hive.metastore.utils.SecurityUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.delegation.AbstractDelegationTokenSecretManager.DelegationTokenInformation;
import org.apache.hadoop.security.token.delegation.MetastoreDelegationTokenSupport;
@@ -159,41 +158,6 @@ public CuratorFramework getSession() {
return zkSession;
}
- private void setupJAASConfig(Configuration conf) throws IOException {
- if (!isKerberosEnabled(conf)) {
- // The process has not logged in using keytab
- // this should be a test mode, can't use keytab to authenticate
- // with zookeeper.
- LOGGER.warn("Login is not from keytab");
- return;
- }
-
- String principal;
- String keytab;
- switch (serverMode) {
- case METASTORE:
- principal = getNonEmptyConfVar(conf,
"hive.metastore.kerberos.principal");
- keytab = getNonEmptyConfVar(conf, "hive.metastore.kerberos.keytab.file");
- break;
- case HIVESERVER2:
- principal = getNonEmptyConfVar(conf,
"hive.server2.authentication.kerberos.principal");
- keytab = getNonEmptyConfVar(conf,
"hive.server2.authentication.kerberos.keytab");
- break;
- default:
- throw new AssertionError("Unexpected server mode " + serverMode);
- }
- SecurityUtils.setZookeeperClientKerberosJaasConfig(principal, keytab);
- }
-
- private String getNonEmptyConfVar(Configuration conf, String param) throws
IOException {
- String val = conf.get(param);
- if (val == null || val.trim().isEmpty()) {
- throw new IOException("Configuration parameter " + param + " should be
set, "
- + WHEN_ZK_DSTORE_MSG);
- }
- return val;
- }
-
/**
* Create a path if it does not already exist ("mkdir -p")
* @param path string with '/' separator
@@ -547,13 +511,6 @@ public void init(Object hmsHandler, HadoopThriftAuthBridge.Server.ServerMode sMo
conf.get(MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ZNODE,
MetastoreDelegationTokenManager.DELEGATION_TOKEN_STORE_ZK_ZNODE_DEFAULT) +
serverMode;
- try {
- // Install the JAAS Configuration for the runtime
- setupJAASConfig(conf);
- } catch (IOException e) {
- throw new TokenStoreException("Error setting up JAAS configuration for
zookeeper client "
- + e.getMessage(), e);
- }
initClientAndPaths();
}