HDDS-6. Enable SCM Kerberos auth. Contributed by Ajay Kumar.
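For readers who want to try the feature, a minimal sketch of a secured SCM start-up using the configuration keys added below. It mirrors the wiring in the new TestSecureOzoneCluster; the principals, keytab paths, and metadata directory are illustrative, and a reachable KDC with matching keytabs is assumed.

import java.nio.file.Files;
import java.util.UUID;

import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.hdds.scm.server.SCMStorage;
import org.apache.hadoop.hdds.scm.server.StorageContainerManager;
import org.apache.hadoop.ozone.OzoneConfigKeys;

public final class SecureScmStartupSketch {

  public static StorageContainerManager startSecureScm() throws Exception {
    OzoneConfiguration conf = new OzoneConfiguration();
    conf.setBoolean(OzoneConfigKeys.OZONE_ENABLED, true);
    // New flag added by this patch; it requires kerberos authentication below.
    conf.setBoolean(OzoneConfigKeys.OZONE_SECURITY_ENABLED_KEY, true);
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
        "kerberos");

    // Service principal/keytab consumed by loginAsSCMUser(); _HOST is resolved
    // against the SCM block client bind address. Values here are illustrative.
    conf.set(ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY,
        "scm/_HOST@EXAMPLE.COM");
    conf.set(ScmConfigKeys.OZONE_SCM_KERBEROS_KEYTAB_FILE_KEY,
        "/etc/security/keytabs/scm.keytab");

    // SPNEGO identity for the SCM HTTP server.
    conf.set(ScmConfigKeys.SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY,
        "HTTP/_HOST@EXAMPLE.COM");
    conf.set(ScmConfigKeys.SCM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE_KEY,
        "/etc/security/keytabs/HTTP.keytab");

    // SCM metadata must be initialized before createSCM(), as in the test.
    conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS,
        Files.createTempDirectory("scm-meta").toString());
    SCMStorage scmStore = new SCMStorage(conf);
    scmStore.setClusterId(UUID.randomUUID().toString());
    scmStore.setScmId(UUID.randomUUID().toString());
    scmStore.initialize();

    // createSCM() now performs the Kerberos login and throws
    // AuthenticationException when hadoop.security.authentication is not
    // kerberos or the login fails.
    StorageContainerManager scm = StorageContainerManager.createSCM(null, conf);
    scm.start();
    return scm;
  }
}

Note that MiniOzoneClusterImpl now wraps the AuthenticationException thrown by createSCM() in an IOException, so existing callers of MiniOzoneCluster.Builder#build() are unaffected.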
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/998df5aa Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/998df5aa Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/998df5aa Branch: refs/heads/HDDS-4 Commit: 998df5aad3a9605424e55513de7efcbbd48ed487 Parents: 6348097 Author: Xiaoyu Yao <x...@apache.org> Authored: Wed May 9 15:56:03 2018 -0700 Committer: Xiaoyu Yao <x...@apache.org> Committed: Tue May 15 16:55:52 2018 -0700 ---------------------------------------------------------------------- .../authentication/util/KerberosUtil.java | 2 +- .../conf/TestConfigurationFieldsBase.java | 2 + .../java/org/apache/hadoop/hdds/HddsUtils.java | 13 +- .../apache/hadoop/hdds/scm/ScmConfigKeys.java | 10 +- .../scm/protocol/ScmBlockLocationProtocol.java | 3 + .../StorageContainerLocationProtocol.java | 4 + .../protocolPB/ScmBlockLocationProtocolPB.java | 6 + .../StorageContainerLocationProtocolPB.java | 4 + .../apache/hadoop/ozone/OzoneConfigKeys.java | 4 +- .../common/src/main/resources/ozone-default.xml | 41 +++- .../StorageContainerDatanodeProtocol.java | 4 + .../StorageContainerDatanodeProtocolPB.java | 6 + .../scm/server/StorageContainerManager.java | 49 ++++- .../StorageContainerManagerHttpServer.java | 5 +- .../ozone/client/protocol/ClientProtocol.java | 3 + hadoop-ozone/common/src/main/bin/start-ozone.sh | 13 +- hadoop-ozone/common/src/main/bin/stop-ozone.sh | 13 +- hadoop-ozone/integration-test/pom.xml | 6 + .../hadoop/ozone/MiniOzoneClusterImpl.java | 17 +- .../hadoop/ozone/TestSecureOzoneCluster.java | 205 +++++++++++++++++++ .../ozone/TestStorageContainerManager.java | 4 +- 21 files changed, 365 insertions(+), 49 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java index c011045..4459928 100644 --- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java +++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/util/KerberosUtil.java @@ -167,7 +167,7 @@ public class KerberosUtil { } /* Return fqdn of the current host */ - static String getLocalHostName() throws UnknownHostException { + public static String getLocalHostName() throws UnknownHostException { return InetAddress.getLocalHost().getCanonicalHostName(); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java ---------------------------------------------------------------------- diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java index 7f27d7d..c20733d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java +++ 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfigurationFieldsBase.java @@ -436,6 +436,8 @@ public abstract class TestConfigurationFieldsBase { // Create XML key/value map LOG_XML.debug("Reading XML property files\n"); xmlKeyValueMap = extractPropertiesFromXml(xmlFilename); + // Remove hadoop property set in ozone-default.xml + xmlKeyValueMap.remove("hadoop.custom.tags"); LOG_XML.debug("\n=====\n"); // Create default configuration variable key/value map http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java index 48c6dce..17c99bb 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/HddsUtils.java @@ -241,18 +241,7 @@ public final class HddsUtils { } public static boolean isHddsEnabled(Configuration conf) { - String securityEnabled = - conf.get(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, - "simple"); - boolean securityAuthorizationEnabled = conf.getBoolean( - CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, false); - - if (securityEnabled.equals("kerberos") || securityAuthorizationEnabled) { - LOG.error("Ozone is not supported in a security enabled cluster. "); - return false; - } else { - return conf.getBoolean(OZONE_ENABLED, OZONE_ENABLED_DEFAULT); - } + return conf.getBoolean(OZONE_ENABLED, OZONE_ENABLED_DEFAULT); } http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java index 29ccf30..83a431e 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/ScmConfigKeys.java @@ -128,8 +128,9 @@ public final class ScmConfigKeys { "ozone.scm.http-address"; public static final String OZONE_SCM_HTTPS_ADDRESS_KEY = "ozone.scm.https-address"; - public static final String OZONE_SCM_KEYTAB_FILE = - "ozone.scm.keytab.file"; + public static final String OZONE_SCM_KERBEROS_KEYTAB_FILE_KEY = + "ozone.scm.kerberos.keytab.file"; + public static final String OZONE_SCM_KERBEROS_PRINCIPAL_KEY = "ozone.scm.kerberos.principal"; public static final String OZONE_SCM_HTTP_BIND_HOST_DEFAULT = "0.0.0.0"; public static final int OZONE_SCM_HTTP_BIND_PORT_DEFAULT = 9876; public static final int OZONE_SCM_HTTPS_BIND_PORT_DEFAULT = 9877; @@ -275,6 +276,11 @@ public final class ScmConfigKeys { public static final String OZONE_SCM_CONTAINER_CLOSE_THRESHOLD = "ozone.scm.container.close.threshold"; public static final float OZONE_SCM_CONTAINER_CLOSE_THRESHOLD_DEFAULT = 0.9f; + + public static final String SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY = + "ozone.scm.web.authentication.kerberos.principal"; + public static final String SCM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE_KEY = + "ozone.scm.web.authentication.kerberos.keytab"; /** * Never constructed. 
*/ http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java index c8d4a80..e17f1c2 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/ScmBlockLocationProtocol.java @@ -17,6 +17,8 @@ */ package org.apache.hadoop.hdds.scm.protocol; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.security.KerberosInfo; import org.apache.hadoop.hdds.scm.ScmInfo; import org.apache.hadoop.hdds.scm.container.common.helpers.AllocatedBlock; import org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor; @@ -31,6 +33,7 @@ import java.util.List; * ScmBlockLocationProtocol is used by an HDFS node to find the set of nodes * to read/write a block. */ +@KerberosInfo(serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY) public interface ScmBlockLocationProtocol { /** http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java index e8d85e0..d36bdf3 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocol/StorageContainerLocationProtocol.java @@ -17,6 +17,8 @@ package org.apache.hadoop.hdds.scm.protocol; +import org.apache.hadoop.hdds.HddsConfigKeys; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; import org.apache.hadoop.hdds.scm.ScmInfo; import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerInfo; import org.apache.hadoop.hdds.scm.container.common.helpers.Pipeline; @@ -27,11 +29,13 @@ import org.apache.hadoop.hdds.protocol.proto import java.io.IOException; import java.util.EnumSet; import java.util.List; +import org.apache.hadoop.security.KerberosInfo; /** * ContainerLocationProtocol is used by an HDFS node to find the set of nodes * that currently host a container. */ +@KerberosInfo(serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY) public interface StorageContainerLocationProtocol { /** * Asks SCM where a container should be allocated. 
SCM responds with the http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java index 837c95b..89bb066 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/ScmBlockLocationProtocolPB.java @@ -18,9 +18,13 @@ package org.apache.hadoop.hdds.scm.protocolPB; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.hdds.HddsConfigKeys; import org.apache.hadoop.hdds.protocol.proto.ScmBlockLocationProtocolProtos .ScmBlockLocationProtocolService; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.ipc.ProtocolInfo; +import org.apache.hadoop.security.KerberosInfo; /** * Protocol used from an HDFS node to StorageContainerManager. This extends the @@ -30,6 +34,8 @@ import org.apache.hadoop.ipc.ProtocolInfo; "org.apache.hadoop.ozone.protocol.ScmBlockLocationProtocol", protocolVersion = 1) @InterfaceAudience.Private +@KerberosInfo( + serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY) public interface ScmBlockLocationProtocolPB extends ScmBlockLocationProtocolService.BlockingInterface { } http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java index f234ad3..3bd83f9 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/hdds/scm/protocolPB/StorageContainerLocationProtocolPB.java @@ -21,7 +21,9 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.hdds.protocol.proto .StorageContainerLocationProtocolProtos .StorageContainerLocationProtocolService; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; import org.apache.hadoop.ipc.ProtocolInfo; +import org.apache.hadoop.security.KerberosInfo; /** * Protocol used from an HDFS node to StorageContainerManager. 
This extends the @@ -30,6 +32,8 @@ import org.apache.hadoop.ipc.ProtocolInfo; @ProtocolInfo(protocolName = "org.apache.hadoop.ozone.protocol.StorageContainerLocationProtocol", protocolVersion = 1) +@KerberosInfo( + serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY) @InterfaceAudience.Private public interface StorageContainerLocationProtocolPB extends StorageContainerLocationProtocolService.BlockingInterface { http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java index b8dbd7b..affe298 100644 --- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java +++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/ozone/OzoneConfigKeys.java @@ -243,7 +243,9 @@ public final class OzoneConfigKeys { public static final String HDDS_DATANODE_PLUGINS_KEY = "hdds.datanode.plugins"; - + public static final String OZONE_SECURITY_ENABLED_KEY = "ozone.security.enabled"; + public static final String OZONE_SYSTEM_TAGS_KEY = "ozone.system.tags"; + public static final boolean OZONE_SECURITY_ENABLED_DEFAULT = false; /** * There is no need to instantiate this class. */ http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-hdds/common/src/main/resources/ozone-default.xml ---------------------------------------------------------------------- diff --git a/hadoop-hdds/common/src/main/resources/ozone-default.xml b/hadoop-hdds/common/src/main/resources/ozone-default.xml index 774b1b8..46c67fd 100644 --- a/hadoop-hdds/common/src/main/resources/ozone-default.xml +++ b/hadoop-hdds/common/src/main/resources/ozone-default.xml @@ -776,14 +776,6 @@ </description> </property> <property> - <name>ozone.scm.keytab.file</name> - <value/> - <tag>OZONE, SECURITY</tag> - <description> - The keytab file for Kerberos authentication in SCM. - </description> - </property> - <property> <name>ozone.scm.max.container.report.threads</name> <value>100</value> <tag>OZONE, PERFORMANCE</tag> @@ -1058,4 +1050,37 @@ </description> </property> + <property> + <name>ozone.security.enabled</name> + <value>false</value> + <tag> OZONE, SECURITY, FLAG</tag> + <description>True if security is enabled for ozone. When this property is true, hadoop.security.authentication should be Kerberos. + </description> + </property> + + <property> + <name>ozone.scm.kerberos.keytab.file</name> + <value></value> + <tag> OZONE, SECURITY</tag> + <description> The keytab file used by each SCM daemon to login as its + service principal. The principal name is configured with + ozone.scm.kerberos.principal. + </description> + </property> + <property> + <name>ozone.scm.kerberos.principal</name> + <value></value> + <tag> OZONE, SECURITY</tag> + <description>The SCM service principal. 
Ex scm/_h...@realm.tld.</description> + </property> + + <property> + <name>ozone.scm.web.authentication.kerberos.principal</name> + <value>HTTP/_h...@example.com</value> + </property> + <property> + <name>ozone.scm.web.authentication.kerberos.keytab</name> + <value>/etc/security/keytabs/HTTP.keytab</value> + </property> + </configuration> \ No newline at end of file http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java index cb657276..80887e7 100644 --- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocol/StorageContainerDatanodeProtocol.java @@ -41,11 +41,15 @@ import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos.SCMVersionResponseProto; import java.io.IOException; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.security.KerberosInfo; /** * The protocol spoken between datanodes and SCM. For specifics please the * Protoc file that defines this protocol. */ +@KerberosInfo( + serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY) @InterfaceAudience.Private public interface StorageContainerDatanodeProtocol { /** http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java index 9b28b5a..9c32ef8 100644 --- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java +++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/protocolPB/StorageContainerDatanodeProtocolPB.java @@ -19,7 +19,10 @@ package org.apache.hadoop.ozone.protocolPB; import org.apache.hadoop.hdds.protocol.proto .StorageContainerDatanodeProtocolProtos .StorageContainerDatanodeProtocolService; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.ipc.ProtocolInfo; +import org.apache.hadoop.security.KerberosInfo; /** * Protocol used from a datanode to StorageContainerManager. 
This extends @@ -29,6 +32,9 @@ import org.apache.hadoop.ipc.ProtocolInfo; @ProtocolInfo(protocolName = "org.apache.hadoop.ozone.protocol.StorageContainerDatanodeProtocol", protocolVersion = 1) +@KerberosInfo( + serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY, + clientPrincipal = DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY) public interface StorageContainerDatanodeProtocolPB extends StorageContainerDatanodeProtocolService.BlockingInterface { } http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java index 0fd6843..21c797d 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManager.java @@ -28,9 +28,11 @@ import com.google.common.cache.RemovalListener; import com.google.common.cache.RemovalNotification; import com.google.protobuf.BlockingService; import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdds.HddsUtils; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.protocol.proto.HddsProtos.NodeState; +import org.apache.hadoop.hdds.scm.HddsServerUtil; import org.apache.hadoop.hdds.scm.block.BlockManager; import org.apache.hadoop.hdds.scm.block.BlockManagerImpl; import org.apache.hadoop.hdds.scm.container.ContainerMapping; @@ -53,6 +55,9 @@ import org.apache.hadoop.ozone.OzoneConfigKeys; import org.apache.hadoop.ozone.common.Storage.StorageState; import org.apache.hadoop.ozone.common.StorageInfo; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; +import org.apache.hadoop.security.SecurityUtil; +import org.apache.hadoop.security.authentication.client.AuthenticationException; import org.apache.hadoop.util.GenericOptionsParser; import org.apache.hadoop.util.StringUtils; import org.slf4j.Logger; @@ -71,6 +76,10 @@ import java.util.concurrent.TimeUnit; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_DB_CACHE_SIZE_DEFAULT; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_DB_CACHE_SIZE_MB; import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ENABLED; +import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SECURITY_ENABLED_DEFAULT; +import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SECURITY_ENABLED_KEY; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY; +import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_KERBEROS_KEYTAB_FILE_KEY; import static org.apache.hadoop.util.ExitUtil.terminate; /** @@ -141,6 +150,7 @@ public class StorageContainerManager extends ServiceRuntimeInfoImpl * Key = DatanodeUuid, value = ContainerStat. */ private Cache<String, ContainerStat> containerReportCache; + private Configuration scmConf; /** * Creates a new StorageContainerManager. 
Configuration will be updated @@ -149,13 +159,19 @@ public class StorageContainerManager extends ServiceRuntimeInfoImpl * * @param conf configuration */ - private StorageContainerManager(OzoneConfiguration conf) throws IOException { + private StorageContainerManager(OzoneConfiguration conf) + throws IOException, AuthenticationException { final int cacheSize = conf.getInt(OZONE_SCM_DB_CACHE_SIZE_MB, OZONE_SCM_DB_CACHE_SIZE_DEFAULT); - + this.scmConf = conf; StorageContainerManager.initMetrics(); initContainerReportCache(conf); + // Authenticate SCM if security is enabled + if (this.scmConf.getBoolean(OZONE_SECURITY_ENABLED_KEY, + OZONE_SECURITY_ENABLED_DEFAULT)) { + loginAsSCMUser(this.scmConf); + } scmStorage = new SCMStorage(conf); if (scmStorage.getState() != StorageState.INITIALIZED) { @@ -186,6 +202,33 @@ public class StorageContainerManager extends ServiceRuntimeInfoImpl } /** + * Login as the configured user for SCM. + * + * @param conf + */ + private void loginAsSCMUser(Configuration conf) + throws IOException, AuthenticationException { + LOG.debug("Ozone security is enabled. Attempting login for SCM user. " + + "Principal: {}, keytab: {}", this.scmConf.get + (OZONE_SCM_KERBEROS_PRINCIPAL_KEY), + this.scmConf.get(OZONE_SCM_KERBEROS_KEYTAB_FILE_KEY)); + + if (SecurityUtil.getAuthenticationMethod(conf).equals + (AuthenticationMethod.KERBEROS)) { + UserGroupInformation.setConfiguration(this.scmConf); + InetSocketAddress socAddr = HddsServerUtil + .getScmBlockClientBindAddress(conf); + SecurityUtil.login(conf, OZONE_SCM_KERBEROS_KEYTAB_FILE_KEY, + OZONE_SCM_KERBEROS_PRINCIPAL_KEY, socAddr.getHostName()); + } else { + throw new AuthenticationException(SecurityUtil.getAuthenticationMethod + (conf) + " authentication method not support. " + + "SCM user login failed."); + } + LOG.info("SCM login successful."); + } + + /** * Builds a message for logging startup information about an RPC server. 
* * @param description RPC server description @@ -269,7 +312,7 @@ public class StorageContainerManager extends ServiceRuntimeInfoImpl public static StorageContainerManager createSCM(String[] argv, OzoneConfiguration conf) - throws IOException { + throws IOException, AuthenticationException { if (!HddsUtils.isHddsEnabled(conf)) { System.err.println( "SCM cannot be started in secure mode or when " + OZONE_ENABLED + "" + http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java ---------------------------------------------------------------------- diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java index 75b2036..da936ad 100644 --- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java +++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/server/StorageContainerManagerHttpServer.java @@ -20,7 +20,6 @@ package org.apache.hadoop.hdds.scm.server; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hdds.scm.ScmConfigKeys; import org.apache.hadoop.hdds.server.BaseHttpServer; -import org.apache.hadoop.ozone.OzoneConfigKeys; import java.io.IOException; @@ -63,11 +62,11 @@ public class StorageContainerManagerHttpServer extends BaseHttpServer { } @Override protected String getKeytabFile() { - return ScmConfigKeys.OZONE_SCM_KEYTAB_FILE; + return ScmConfigKeys.SCM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE_KEY; } @Override protected String getSpnegoPrincipal() { - return OzoneConfigKeys.OZONE_SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL; + return ScmConfigKeys.SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY; } @Override protected String getEnabledKey() { http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java ---------------------------------------------------------------------- diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java index 94cc257..80b0a40 100644 --- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java +++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/protocol/ClientProtocol.java @@ -19,6 +19,7 @@ package org.apache.hadoop.ozone.client.protocol; import org.apache.hadoop.fs.StorageType; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; import org.apache.hadoop.ozone.OzoneAcl; import org.apache.hadoop.ozone.client.BucketArgs; import org.apache.hadoop.ozone.client.OzoneBucket; @@ -33,6 +34,7 @@ import org.apache.hadoop.ozone.client.io.OzoneOutputStream; import java.io.IOException; import java.util.List; +import org.apache.hadoop.security.KerberosInfo; /** * An implementer of this interface is capable of connecting to Ozone Cluster @@ -42,6 +44,7 @@ import java.util.List; * includes: {@link org.apache.hadoop.ozone.client.rpc.RpcClient} for RPC and * {@link org.apache.hadoop.ozone.client.rest.RestClient} for REST. 
*/ +@KerberosInfo(serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY) public interface ClientProtocol { /** http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-ozone/common/src/main/bin/start-ozone.sh ---------------------------------------------------------------------- diff --git a/hadoop-ozone/common/src/main/bin/start-ozone.sh b/hadoop-ozone/common/src/main/bin/start-ozone.sh index dda0a1c..ba0f714 100644 --- a/hadoop-ozone/common/src/main/bin/start-ozone.sh +++ b/hadoop-ozone/common/src/main/bin/start-ozone.sh @@ -47,13 +47,12 @@ else exit 1 fi -SECURITY_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authentication | tr '[:upper:]' '[:lower:]' 2>&-) -SECURITY_AUTHORIZATION_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authorization | tr '[:upper:]' '[:lower:]' 2>&-) - -if [[ ${SECURITY_ENABLED} == "kerberos" || ${SECURITY_AUTHORIZATION_ENABLED} == "true" ]]; then - echo "Ozone is not supported in a security enabled cluster." - exit 1 -fi +#SECURITY_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authentication | tr '[:upper:]' '[:lower:]' 2>&-) +#SECURITY_AUTHORIZATION_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authorization | tr '[:upper:]' '[:lower:]' 2>&-) +#if [[ ${SECURITY_ENABLED} == "kerberos" || ${SECURITY_AUTHORIZATION_ENABLED} == "true" ]]; then +# echo "Ozone is not supported in a security enabled cluster." +# exit 1 +#fi #--------------------------------------------------------- # Check if ozone is enabled http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-ozone/common/src/main/bin/stop-ozone.sh ---------------------------------------------------------------------- diff --git a/hadoop-ozone/common/src/main/bin/stop-ozone.sh b/hadoop-ozone/common/src/main/bin/stop-ozone.sh index be55be4..ff332f2 100644 --- a/hadoop-ozone/common/src/main/bin/stop-ozone.sh +++ b/hadoop-ozone/common/src/main/bin/stop-ozone.sh @@ -47,13 +47,12 @@ else exit 1 fi -SECURITY_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authentication | tr '[:upper:]' '[:lower:]' 2>&-) -SECURITY_AUTHORIZATION_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authorization | tr '[:upper:]' '[:lower:]' 2>&-) - -if [[ ${SECURITY_ENABLED} == "kerberos" || ${SECURITY_AUTHORIZATION_ENABLED} == "true" ]]; then - echo "Ozone is not supported in a security enabled cluster." - exit 1 -fi +#SECURITY_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authentication | tr '[:upper:]' '[:lower:]' 2>&-) +#SECURITY_AUTHORIZATION_ENABLED=$("${HADOOP_HDFS_HOME}/bin/ozone" getozoneconf -confKey hadoop.security.authorization | tr '[:upper:]' '[:lower:]' 2>&-) +#if [[ ${SECURITY_ENABLED} == "kerberos" || ${SECURITY_AUTHORIZATION_ENABLED} == "true" ]]; then +# echo "Ozone is not supported in a security enabled cluster." 
+# exit 1 +#fi #--------------------------------------------------------- # Check if ozone is enabled http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-ozone/integration-test/pom.xml ---------------------------------------------------------------------- diff --git a/hadoop-ozone/integration-test/pom.xml b/hadoop-ozone/integration-test/pom.xml index c8a932c..4aa1aa5 100644 --- a/hadoop-ozone/integration-test/pom.xml +++ b/hadoop-ozone/integration-test/pom.xml @@ -42,6 +42,12 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd"> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-minikdc</artifactId> + <scope>test</scope> + </dependency> + + <dependency> + <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-ozone-objectstore-service</artifactId> <scope>provided</scope> </dependency> http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java ---------------------------------------------------------------------- diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java index 08d7176..e4f8e62 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/MiniOzoneClusterImpl.java @@ -44,6 +44,7 @@ import org.apache.hadoop.hdds.scm.protocolPB import org.apache.hadoop.hdds.scm.protocolPB.StorageContainerLocationProtocolPB; import org.apache.hadoop.hdds.scm.server.StorageContainerManager; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authentication.client.AuthenticationException; import org.apache.hadoop.test.GenericTestUtils; import org.slf4j.Logger; @@ -287,9 +288,16 @@ public final class MiniOzoneClusterImpl implements MiniOzoneCluster { public MiniOzoneCluster build() throws IOException { DefaultMetricsSystem.setMiniClusterMode(true); initializeConfiguration(); - StorageContainerManager scm = createSCM(); - scm.start(); - KeySpaceManager ksm = createKSM(); + StorageContainerManager scm; + KeySpaceManager ksm; + try { + scm = createSCM(); + scm.start(); + ksm = createKSM(); + } catch (AuthenticationException ex) { + throw new IOException("Unable to build MiniOzoneCluster. 
", ex); + } + ksm.start(); List<HddsDatanodeService> hddsDatanodes = createHddsDatanodes(scm); hddsDatanodes.forEach((datanode) -> datanode.start(null)); @@ -316,7 +324,8 @@ public final class MiniOzoneClusterImpl implements MiniOzoneCluster { * * @throws IOException */ - private StorageContainerManager createSCM() throws IOException { + private StorageContainerManager createSCM() + throws IOException, AuthenticationException { configureSCM(); SCMStorage scmStore = new SCMStorage(conf); scmStore.setClusterId(clusterId); http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java ---------------------------------------------------------------------- diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java new file mode 100644 index 0000000..9c430ad --- /dev/null +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestSecureOzoneCluster.java @@ -0,0 +1,205 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.ozone; + +import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_ADMINISTRATORS; +import static org.apache.hadoop.ozone.OzoneConfigKeys.OZONE_SECURITY_ENABLED_KEY; + +import java.io.File; +import java.io.IOException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.Properties; +import java.util.UUID; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.CommonConfigurationKeysPublic; +import org.apache.hadoop.hdds.conf.OzoneConfiguration; +import org.apache.hadoop.hdds.scm.ScmConfigKeys; +import org.apache.hadoop.hdds.scm.ScmInfo; +import org.apache.hadoop.hdds.scm.server.SCMStorage; +import org.apache.hadoop.hdds.scm.server.StorageContainerManager; +import org.apache.hadoop.minikdc.MiniKdc; +import org.apache.hadoop.security.KerberosAuthException; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authentication.client.AuthenticationException; +import org.apache.hadoop.security.authentication.util.KerberosUtil; +import org.apache.hadoop.test.GenericTestUtils; +import org.apache.hadoop.test.LambdaTestUtils; +import org.junit.Assert; +import org.junit.Before; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +/** + * Test class to for security enabled Ozone cluster. 
+ */ +@InterfaceAudience.Private +public final class TestSecureOzoneCluster { + + private Logger LOGGER = LoggerFactory + .getLogger(TestSecureOzoneCluster.class); + + private MiniKdc miniKdc; + private OzoneConfiguration conf; + private File workDir; + private static Properties securityProperties; + private File scmKeytab; + private File spnegoKeytab; + private String curUser; + + @Before + public void init() { + try { + conf = new OzoneConfiguration(); + startMiniKdc(); + setSecureConfig(conf); + createCredentialsInKDC(conf, miniKdc); + } catch (IOException e) { + LOGGER.error("Failed to initialize TestSecureOzoneCluster", e); + } catch (Exception e) { + LOGGER.error("Failed to initialize TestSecureOzoneCluster", e); + } + } + + private void createCredentialsInKDC(Configuration conf, MiniKdc miniKdc) + throws Exception { + createPrincipal(scmKeytab, + conf.get(ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY)); + createPrincipal(spnegoKeytab, + conf.get(ScmConfigKeys.SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY)); + } + + private void createPrincipal(File keytab, String... principal) + throws Exception { + miniKdc.createPrincipal(keytab, principal); + } + + private void startMiniKdc() throws Exception { + workDir = GenericTestUtils + .getTestDir(TestSecureOzoneCluster.class.getSimpleName()); + securityProperties = MiniKdc.createConf(); + miniKdc = new MiniKdc(securityProperties, workDir); + miniKdc.start(); + } + + private void setSecureConfig(Configuration conf) throws IOException { + conf.setBoolean(OZONE_SECURITY_ENABLED_KEY, true); + String host = KerberosUtil.getLocalHostName(); + String realm = miniKdc.getRealm(); + curUser = UserGroupInformation.getCurrentUser() + .getUserName(); + conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, + "kerberos"); + conf.set(OZONE_ADMINISTRATORS, curUser); + + conf.set(ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY, + "scm/" + host + "@" + realm); + conf.set(ScmConfigKeys.SCM_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, + "HTTP_SCM/" + host + "@" + realm); + + scmKeytab = new File(workDir, "scm.keytab"); + spnegoKeytab = new File(workDir, "http.keytab"); + + conf.set(ScmConfigKeys.OZONE_SCM_KERBEROS_KEYTAB_FILE_KEY, + scmKeytab.getAbsolutePath()); + conf.set(ScmConfigKeys.SCM_WEB_AUTHENTICATION_KERBEROS_KEYTAB_FILE_KEY, + spnegoKeytab.getAbsolutePath()); + + } + + @Test + public void testSecureScmStartupSuccess() throws Exception { + final String path = GenericTestUtils + .getTempPath(UUID.randomUUID().toString()); + Path scmPath = Paths.get(path, "scm-meta"); + conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, scmPath.toString()); + conf.setBoolean(OzoneConfigKeys.OZONE_ENABLED, true); + SCMStorage scmStore = new SCMStorage(conf); + String clusterId = UUID.randomUUID().toString(); + String scmId = UUID.randomUUID().toString(); + scmStore.setClusterId(clusterId); + scmStore.setScmId(scmId); + // writes the version file properties + scmStore.initialize(); + StorageContainerManager scm = StorageContainerManager.createSCM(null, conf); + //Reads the SCM Info from SCM instance + ScmInfo scmInfo = scm.getClientProtocolServer().getScmInfo(); + Assert.assertEquals(clusterId, scmInfo.getClusterId()); + Assert.assertEquals(scmId, scmInfo.getScmId()); + } + + @Test + public void testSecureScmStartupFailure() throws Exception { + final String path = GenericTestUtils + .getTempPath(UUID.randomUUID().toString()); + Path scmPath = Paths.get(path, "scm-meta"); + + OzoneConfiguration conf = new OzoneConfiguration(); + 
conf.setBoolean(OZONE_SECURITY_ENABLED_KEY, true); + conf.set(OzoneConfigKeys.OZONE_METADATA_DIRS, scmPath.toString()); + conf.setBoolean(OzoneConfigKeys.OZONE_ENABLED, true); + conf.set(ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY, + "scm@" + miniKdc.getRealm()); + conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, + "kerberos"); + + SCMStorage scmStore = new SCMStorage(conf); + String clusterId = UUID.randomUUID().toString(); + String scmId = UUID.randomUUID().toString(); + scmStore.setClusterId(clusterId); + scmStore.setScmId(scmId); + // writes the version file properties + scmStore.initialize(); + LambdaTestUtils.intercept(IOException.class, + "Running in secure mode, but config doesn't have a keytab", + () -> { + StorageContainerManager.createSCM(null, conf); + }); + + conf.set(ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY, + "scm/_h...@example.com"); + conf.set(ScmConfigKeys.OZONE_SCM_KERBEROS_KEYTAB_FILE_KEY, + "/etc/security/keytabs/scm.keytab"); + + LambdaTestUtils.intercept(KerberosAuthException.class, "failure " + + "to login: for principal:", + () -> { + StorageContainerManager.createSCM(null, conf); + }); + conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, + "OAuth2"); + + LambdaTestUtils.intercept(IllegalArgumentException.class, "Invalid" + + " attribute value for hadoop.security.authentication of OAuth2", + () -> { + StorageContainerManager.createSCM(null, conf); + }); + + conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, + "KERBEROS_SSL"); + LambdaTestUtils.intercept(AuthenticationException.class, + "KERBEROS_SSL authentication method not support.", + () -> { + StorageContainerManager.createSCM(null, conf); + }); + + } + +} http://git-wip-us.apache.org/repos/asf/hadoop/blob/998df5aa/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java ---------------------------------------------------------------------- diff --git a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java index 0081f0d..8e8df7a 100644 --- a/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java +++ b/hadoop-ozone/integration-test/src/test/java/org/apache/hadoop/ozone/TestStorageContainerManager.java @@ -42,6 +42,7 @@ import org.apache.hadoop.hdds.scm.block.SCMBlockDeletingService; import org.apache.hadoop.hdds.scm.exceptions.SCMException; import org.apache.hadoop.hdds.scm.XceiverClientManager; import org.apache.hadoop.hdds.scm.ScmInfo; +import org.apache.hadoop.security.authentication.client.AuthenticationException; import org.junit.Rule; import org.junit.Assert; import org.junit.Test; @@ -409,7 +410,8 @@ public class TestStorageContainerManager { } @Test - public void testSCMInitializationFailure() throws IOException { + public void testSCMInitializationFailure() + throws IOException, AuthenticationException { OzoneConfiguration conf = new OzoneConfiguration(); final String path = GenericTestUtils.getTempPath(UUID.randomUUID().toString()); --------------------------------------------------------------------- To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org For additional commands, e-mail: common-commits-h...@hadoop.apache.org
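Because the SCM protocols are now annotated with @KerberosInfo(serverPrincipal = ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY), a client of a secured SCM needs that key in its own configuration and a Kerberos login of its own. A hedged sketch of that client-side setup follows; the key name comes from this patch, the UserGroupInformation calls are standard Hadoop security APIs, and the principal and keytab path are illustrative.

import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.security.UserGroupInformation;

public final class SecureScmClientSketch {

  public static void loginForScmRpc() throws Exception {
    OzoneConfiguration conf = new OzoneConfiguration();
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION,
        "kerberos");
    // The @KerberosInfo annotations resolve the server principal from this key,
    // so clients of ScmBlockLocationProtocol / StorageContainerLocationProtocol
    // must carry it in their configuration as well.
    conf.set(ScmConfigKeys.OZONE_SCM_KERBEROS_PRINCIPAL_KEY,
        "scm/_HOST@EXAMPLE.COM");

    // Standard UGI keytab login; principal and keytab path are illustrative.
    UserGroupInformation.setConfiguration(conf);
    UserGroupInformation.loginUserFromKeytab(
        "client/client-host@EXAMPLE.COM", "/etc/security/keytabs/client.keytab");
  }
}

StorageContainerDatanodeProtocolPB additionally pins clientPrincipal to DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, so only principals matching the configured datanode identity can register with a secured SCM.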