HADOOP-11934. Use of JavaKeyStoreProvider in LdapGroupsMapping causes infinite loop. Contributed by Larry McCay.
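The patch below splits JavaKeyStoreProvider into an abstract base class and adds LocalJavaKeyStoreProvider, registered under the localjceks:// scheme, which reads and writes the keystore with plain java.io instead of a Hadoop FileSystem. Keeping credential resolution off the FileSystem path is what avoids the re-entry into group mapping when LdapGroupsMapping resolves its passwords. A minimal usage sketch, assuming an illustrative keystore path and alias name (conf.getPassword() is shown only as the typical consumer-side call):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.alias.CredentialProvider;
import org.apache.hadoop.security.alias.CredentialProviderFactory;

public class LocalJceksExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // localjceks:// maps to a plain local file, so resolving the credential
    // never opens a Hadoop FileSystem (and never re-enters group mapping).
    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH,
        "localjceks://file/tmp/ldap-creds.jceks");        // illustrative path

    CredentialProvider provider =
        CredentialProviderFactory.getProviders(conf).get(0);
    provider.createCredentialEntry("ldap.bind.password",  // illustrative alias
        "secret".toCharArray());
    provider.flush();

    // A consumer such as LdapGroupsMapping would read the value back through
    // Configuration.getPassword(alias).
    char[] pw = conf.getPassword("ldap.bind.password");
    System.out.println(new String(pw));
  }
}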
Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/3fcb33b3
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/3fcb33b3
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/3fcb33b3

Branch: refs/heads/YARN-2928
Commit: 3fcb33b37d05bd6af730fd6dfbc054c2686a7553
Parents: b17498d
Author: cnauroth <cnaur...@apache.org>
Authored: Thu May 28 15:01:42 2015 -0700
Committer: Zhijie Shen <zjs...@apache.org>
Committed: Tue Jun 2 16:12:56 2015 -0700

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |   3 +
 .../alias/AbstractJavaKeyStoreProvider.java     | 344 +++++++++++++++++++
 .../security/alias/JavaKeyStoreProvider.java    | 257 ++------------
 .../alias/LocalJavaKeyStoreProvider.java        | 192 +++++++++++
 ...oop.security.alias.CredentialProviderFactory |   1 +
 .../alias/TestCredentialProviderFactory.java    |  25 +-
 6 files changed, 597 insertions(+), 225 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/3fcb33b3/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 71b71a0..af82c0b 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -832,6 +832,9 @@ Release 2.7.1 - UNRELEASED
     HADOOP-11973. Ensure ZkDelegationTokenSecretManager namespace znodes get
     created with ACLs. (Gregory Chanan via asuresh)
 
+    HADOOP-11934. Use of JavaKeyStoreProvider in LdapGroupsMapping causes
+    infinite loop. (Larry McCay via cnauroth)
+
 Release 2.7.0 - 2015-04-20
 
   INCOMPATIBLE CHANGES

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3fcb33b3/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
new file mode 100644
index 0000000..9251044
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/AbstractJavaKeyStoreProvider.java
@@ -0,0 +1,344 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.security.alias;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.ProviderUtils;
+
+import com.google.common.base.Charsets;
+
+import javax.crypto.spec.SecretKeySpec;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.net.URL;
+import java.security.KeyStore;
+import java.security.KeyStoreException;
+import java.security.NoSuchAlgorithmException;
+import java.security.UnrecoverableKeyException;
+import java.security.cert.CertificateException;
+import java.util.ArrayList;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+/**
+ * Abstract class for implementing credential providers that are based on
+ * Java Keystores as the underlying credential store.
+ *
+ * The password for the keystore is taken from the HADOOP_CREDSTORE_PASSWORD
+ * environment variable with a default of 'none'.
+ *
+ * It is expected that for access to credential protected resource to copy the
+ * creds from the original provider into the job's Credentials object, which is
+ * accessed via the UserProvider. Therefore, these providers won't be directly
+ * used by MapReduce tasks.
+ */
+@InterfaceAudience.Private
+public abstract class AbstractJavaKeyStoreProvider extends CredentialProvider {
+  public static final String CREDENTIAL_PASSWORD_NAME =
+      "HADOOP_CREDSTORE_PASSWORD";
+  public static final String KEYSTORE_PASSWORD_FILE_KEY =
+      "hadoop.security.credstore.java-keystore-provider.password-file";
+  public static final String KEYSTORE_PASSWORD_DEFAULT = "none";
+
+  private Path path;
+  private final URI uri;
+  private final KeyStore keyStore;
+  private char[] password = null;
+  private boolean changed = false;
+  private Lock readLock;
+  private Lock writeLock;
+
+  protected AbstractJavaKeyStoreProvider(URI uri, Configuration conf)
+      throws IOException {
+    this.uri = uri;
+    initFileSystem(uri, conf);
+    // Get the password from the user's environment
+    if (System.getenv().containsKey(CREDENTIAL_PASSWORD_NAME)) {
+      password = System.getenv(CREDENTIAL_PASSWORD_NAME).toCharArray();
+    }
+    // if not in ENV get check for file
+    if (password == null) {
+      String pwFile = conf.get(KEYSTORE_PASSWORD_FILE_KEY);
+      if (pwFile != null) {
+        ClassLoader cl = Thread.currentThread().getContextClassLoader();
+        URL pwdFile = cl.getResource(pwFile);
+        if (pwdFile != null) {
+          try (InputStream is = pwdFile.openStream()) {
+            password = IOUtils.toString(is).trim().toCharArray();
+          }
+        }
+      }
+    }
+    if (password == null) {
+      password = KEYSTORE_PASSWORD_DEFAULT.toCharArray();
+    }
+    try {
+      keyStore = KeyStore.getInstance("jceks");
+      if (keystoreExists()) {
+        stashOriginalFilePermissions();
+        try (InputStream in = getInputStreamForFile()) {
+          keyStore.load(in, password);
+        }
+      } else {
+        createPermissions("700");
+        // required to create an empty keystore. *sigh*
+        keyStore.load(null, password);
+      }
+    } catch (KeyStoreException e) {
+      throw new IOException("Can't create keystore", e);
+    } catch (NoSuchAlgorithmException e) {
+      throw new IOException("Can't load keystore " + getPathAsString(), e);
+    } catch (CertificateException e) {
+      throw new IOException("Can't load keystore " + getPathAsString(), e);
+    }
+    ReadWriteLock lock = new ReentrantReadWriteLock(true);
+    readLock = lock.readLock();
+    writeLock = lock.writeLock();
+  }
+
+  public Path getPath() {
+    return path;
+  }
+
+  public void setPath(Path p) {
+    this.path = p;
+  }
+
+  public char[] getPassword() {
+    return password;
+  }
+
+  public void setPassword(char[] pass) {
+    this.password = pass;
+  }
+
+  public boolean isChanged() {
+    return changed;
+  }
+
+  public void setChanged(boolean chg) {
+    this.changed = chg;
+  }
+
+  public Lock getReadLock() {
+    return readLock;
+  }
+
+  public void setReadLock(Lock rl) {
+    this.readLock = rl;
+  }
+
+  public Lock getWriteLock() {
+    return writeLock;
+  }
+
+  public void setWriteLock(Lock wl) {
+    this.writeLock = wl;
+  }
+
+  public URI getUri() {
+    return uri;
+  }
+
+  public KeyStore getKeyStore() {
+    return keyStore;
+  }
+
+  public Map<String, CredentialEntry> getCache() {
+    return cache;
+  }
+
+  private final Map<String, CredentialEntry> cache =
+      new HashMap<String, CredentialEntry>();
+
+  protected final String getPathAsString() {
+    return getPath().toString();
+  }
+
+  protected abstract String getSchemeName();
+
+  protected abstract OutputStream getOutputStreamForKeystore()
+      throws IOException;
+
+  protected abstract boolean keystoreExists() throws IOException;
+
+  protected abstract InputStream getInputStreamForFile() throws IOException;
+
+  protected abstract void createPermissions(String perms) throws IOException;
+
+  protected abstract void stashOriginalFilePermissions() throws IOException;
+
+  protected void initFileSystem(URI keystoreUri, Configuration conf)
+      throws IOException {
+    path = ProviderUtils.unnestUri(keystoreUri);
+  }
+
+  @Override
+  public CredentialEntry getCredentialEntry(String alias)
+      throws IOException {
+    readLock.lock();
+    try {
+      SecretKeySpec key = null;
+      try {
+        if (cache.containsKey(alias)) {
+          return cache.get(alias);
+        }
+        if (!keyStore.containsAlias(alias)) {
+          return null;
+        }
+        key = (SecretKeySpec) keyStore.getKey(alias, password);
+      } catch (KeyStoreException e) {
+        throw new IOException("Can't get credential " + alias + " from "
+            + getPathAsString(), e);
+      } catch (NoSuchAlgorithmException e) {
+        throw new IOException("Can't get algorithm for credential " + alias
+            + " from " + getPathAsString(), e);
+      } catch (UnrecoverableKeyException e) {
+        throw new IOException("Can't recover credential " + alias + " from "
+            + getPathAsString(), e);
+      }
+      return new CredentialEntry(alias, bytesToChars(key.getEncoded()));
+    } finally {
+      readLock.unlock();
+    }
+  }
+
+  public static char[] bytesToChars(byte[] bytes) throws IOException {
+    String pass;
+    pass = new String(bytes, Charsets.UTF_8);
+    return pass.toCharArray();
+  }
+
+  @Override
+  public List<String> getAliases() throws IOException {
+    readLock.lock();
+    try {
+      ArrayList<String> list = new ArrayList<String>();
+      String alias = null;
+      try {
+        Enumeration<String> e = keyStore.aliases();
+        while (e.hasMoreElements()) {
+          alias = e.nextElement();
+          list.add(alias);
+        }
+      } catch (KeyStoreException e) {
+        throw new IOException("Can't get alias " + alias + " from "
+            + getPathAsString(), e);
+      }
+      return list;
+    } finally {
+      readLock.unlock();
+    }
+  }
+
+  @Override
+  public CredentialEntry createCredentialEntry(String alias, char[] credential)
+      throws IOException {
+    writeLock.lock();
+    try {
+      if (keyStore.containsAlias(alias) || cache.containsKey(alias)) {
+        throw new IOException("Credential " + alias + " already exists in "
+            + this);
+      }
+      return innerSetCredential(alias, credential);
+    } catch (KeyStoreException e) {
+      throw new IOException("Problem looking up credential " + alias + " in "
+          + this, e);
+    } finally {
+      writeLock.unlock();
+    }
+  }
+
+  @Override
+  public void deleteCredentialEntry(String name) throws IOException {
+    writeLock.lock();
+    try {
+      try {
+        if (keyStore.containsAlias(name)) {
+          keyStore.deleteEntry(name);
+        } else {
+          throw new IOException("Credential " + name + " does not exist in "
+              + this);
+        }
+      } catch (KeyStoreException e) {
+        throw new IOException("Problem removing " + name + " from " + this, e);
+      }
+      cache.remove(name);
+      changed = true;
+    } finally {
+      writeLock.unlock();
+    }
+  }
+
+  CredentialEntry innerSetCredential(String alias, char[] material)
+      throws IOException {
+    writeLock.lock();
+    try {
+      keyStore.setKeyEntry(alias,
+          new SecretKeySpec(new String(material).getBytes("UTF-8"), "AES"),
+          password, null);
+    } catch (KeyStoreException e) {
+      throw new IOException("Can't store credential " + alias + " in " + this,
+          e);
+    } finally {
+      writeLock.unlock();
+    }
+    changed = true;
+    return new CredentialEntry(alias, material);
+  }
+
+  @Override
+  public void flush() throws IOException {
+    writeLock.lock();
+    try {
+      if (!changed) {
+        return;
+      }
+      // write out the keystore
+      try (OutputStream out = getOutputStreamForKeystore()) {
+        keyStore.store(out, password);
+      } catch (KeyStoreException e) {
+        throw new IOException("Can't store keystore " + this, e);
+      } catch (NoSuchAlgorithmException e) {
+        throw new IOException("No such algorithm storing keystore " + this, e);
+      } catch (CertificateException e) {
+        throw new IOException("Certificate exception storing keystore " + this,
+            e);
+      }
+      changed = false;
+    } finally {
+      writeLock.unlock();
+    }
+  }
+
+  @Override
+  public String toString() {
+    return uri.toString();
+  }
+}
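As the class comment above states, the keystore password is resolved from the HADOOP_CREDSTORE_PASSWORD environment variable first, then from a password file located on the classpath via hadoop.security.credstore.java-keystore-provider.password-file, and only then falls back to the literal default "none". A standalone sketch of that resolution order, assuming a hypothetical helper class (the constants mirror the ones defined above):

import java.io.InputStream;
import java.net.URL;
import org.apache.commons.io.IOUtils;
import org.apache.hadoop.conf.Configuration;

// Hypothetical helper mirroring the order used by the constructor above.
final class KeystorePasswordResolver {
  static char[] resolve(Configuration conf) throws Exception {
    // 1. Environment variable takes precedence.
    String env = System.getenv("HADOOP_CREDSTORE_PASSWORD");
    if (env != null) {
      return env.toCharArray();
    }
    // 2. Otherwise look for a password file on the classpath.
    String pwFile = conf.get(
        "hadoop.security.credstore.java-keystore-provider.password-file");
    if (pwFile != null) {
      URL url =
          Thread.currentThread().getContextClassLoader().getResource(pwFile);
      if (url != null) {
        try (InputStream is = url.openStream()) {
          return IOUtils.toString(is).trim().toCharArray();
        }
      }
    }
    // 3. Fall back to the documented default.
    return "none".toCharArray();
  }
}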

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3fcb33b3/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java
index 5e5cebb..d02ebde 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/JavaKeyStoreProvider.java
@@ -18,266 +18,75 @@ package org.apache.hadoop.security.alias;
 
-import org.apache.commons.io.Charsets;
-import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.security.ProviderUtils;
 
-import javax.crypto.spec.SecretKeySpec;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.OutputStream;
 import java.net.URI;
-import java.net.URL;
-import java.security.KeyStore;
-import java.security.KeyStoreException;
-import java.security.NoSuchAlgorithmException;
-import java.security.UnrecoverableKeyException;
-import java.security.cert.CertificateException;
-import java.util.ArrayList;
-import java.util.Enumeration;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.locks.Lock;
-import java.util.concurrent.locks.ReadWriteLock;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
 
 /**
- * CredentialProvider based on Java's KeyStore file format. The file may be
+ * CredentialProvider based on Java's KeyStore file format. The file may be
  * stored in any Hadoop FileSystem using the following name mangling:
- * jceks://h...@nn1.example.com/my/creds.jceks -> hdfs://nn1.example.com/my/creds.jceks
- * jceks://file/home/larry/creds.jceks -> file:///home/larry/creds.jceks
- *
- * The password for the keystore is taken from the HADOOP_CREDSTORE_PASSWORD
- * environment variable with a default of 'none'.
- *
- * It is expected that for access to credential protected resource to copy the
- * creds from the original provider into the job's Credentials object, which is
- * accessed via the UserProvider. Therefore, this provider won't be directly
- * used by MapReduce tasks.
+ * jceks://h...@nn1.example.com/my/creds.jceks ->
+ * hdfs://nn1.example.com/my/creds.jceks jceks://file/home/larry/creds.jceks ->
+ * file:///home/larry/creds.jceks
  */
 @InterfaceAudience.Private
-public class JavaKeyStoreProvider extends CredentialProvider {
+public class JavaKeyStoreProvider extends AbstractJavaKeyStoreProvider {
   public static final String SCHEME_NAME = "jceks";
-  public static final String CREDENTIAL_PASSWORD_NAME =
-      "HADOOP_CREDSTORE_PASSWORD";
-  public static final String KEYSTORE_PASSWORD_FILE_KEY =
-      "hadoop.security.credstore.java-keystore-provider.password-file";
-  public static final String KEYSTORE_PASSWORD_DEFAULT = "none";
-
-  private final URI uri;
-  private final Path path;
-  private final FileSystem fs;
-  private final FsPermission permissions;
-  private final KeyStore keyStore;
-  private char[] password = null;
-  private boolean changed = false;
-  private Lock readLock;
-  private Lock writeLock;
-
-  private final Map<String, CredentialEntry> cache = new HashMap<String, CredentialEntry>();
-
-  private JavaKeyStoreProvider(URI uri, Configuration conf) throws IOException {
-    this.uri = uri;
-    path = ProviderUtils.unnestUri(uri);
-    fs = path.getFileSystem(conf);
-    // Get the password from the user's environment
-    if (System.getenv().containsKey(CREDENTIAL_PASSWORD_NAME)) {
-      password = System.getenv(CREDENTIAL_PASSWORD_NAME).toCharArray();
-    }
-    // if not in ENV get check for file
-    if (password == null) {
-      String pwFile = conf.get(KEYSTORE_PASSWORD_FILE_KEY);
-      if (pwFile != null) {
-        ClassLoader cl = Thread.currentThread().getContextClassLoader();
-        URL pwdFile = cl.getResource(pwFile);
-        if (pwdFile != null) {
-          try (InputStream is = pwdFile.openStream()) {
-            password = IOUtils.toString(is).trim().toCharArray();
-          }
-        }
-      }
-    }
-    if (password == null) {
-      password = KEYSTORE_PASSWORD_DEFAULT.toCharArray();
-    }
-    try {
-      keyStore = KeyStore.getInstance(SCHEME_NAME);
-      if (fs.exists(path)) {
-        // save off permissions in case we need to
-        // rewrite the keystore in flush()
-        FileStatus s = fs.getFileStatus(path);
-        permissions = s.getPermission();
+  private FileSystem fs;
+  private FsPermission permissions;
 
-        try (FSDataInputStream in = fs.open(path)) {
-          keyStore.load(in, password);
-        }
-      } else {
-        permissions = new FsPermission("700");
-        // required to create an empty keystore. *sigh*
-        keyStore.load(null, password);
-      }
-    } catch (KeyStoreException e) {
-      throw new IOException("Can't create keystore", e);
-    } catch (NoSuchAlgorithmException e) {
-      throw new IOException("Can't load keystore " + path, e);
-    } catch (CertificateException e) {
-      throw new IOException("Can't load keystore " + path, e);
-    }
-    ReadWriteLock lock = new ReentrantReadWriteLock(true);
-    readLock = lock.readLock();
-    writeLock = lock.writeLock();
+  private JavaKeyStoreProvider(URI uri, Configuration conf)
+      throws IOException {
+    super(uri, conf);
   }
 
   @Override
-  public CredentialEntry getCredentialEntry(String alias) throws IOException {
-    readLock.lock();
-    try {
-      SecretKeySpec key = null;
-      try {
-        if (cache.containsKey(alias)) {
-          return cache.get(alias);
-        }
-        if (!keyStore.containsAlias(alias)) {
-          return null;
-        }
-        key = (SecretKeySpec) keyStore.getKey(alias, password);
-      } catch (KeyStoreException e) {
-        throw new IOException("Can't get credential " + alias + " from " +
-            path, e);
-      } catch (NoSuchAlgorithmException e) {
-        throw new IOException("Can't get algorithm for credential " + alias + " from " +
-            path, e);
-      } catch (UnrecoverableKeyException e) {
-        throw new IOException("Can't recover credential " + alias + " from " + path, e);
-      }
-      return new CredentialEntry(alias, bytesToChars(key.getEncoded()));
-    }
-    finally {
-      readLock.unlock();
-    }
-  }
-
-  public static char[] bytesToChars(byte[] bytes) {
-    String pass = new String(bytes, Charsets.UTF_8);
-    return pass.toCharArray();
+  protected String getSchemeName() {
+    return SCHEME_NAME;
   }
 
   @Override
-  public List<String> getAliases() throws IOException {
-    readLock.lock();
-    try {
-      ArrayList<String> list = new ArrayList<String>();
-      String alias = null;
-      try {
-        Enumeration<String> e = keyStore.aliases();
-        while (e.hasMoreElements()) {
-          alias = e.nextElement();
-          list.add(alias);
-        }
-      } catch (KeyStoreException e) {
-        throw new IOException("Can't get alias " + alias + " from " + path, e);
-      }
-      return list;
-    }
-    finally {
-      readLock.unlock();
-    }
+  protected OutputStream getOutputStreamForKeystore() throws IOException {
+    FSDataOutputStream out = FileSystem.create(fs, getPath(), permissions);
+    return out;
   }
 
   @Override
-  public CredentialEntry createCredentialEntry(String alias, char[] credential)
-      throws IOException {
-    writeLock.lock();
-    try {
-      if (keyStore.containsAlias(alias) || cache.containsKey(alias)) {
-        throw new IOException("Credential " + alias + " already exists in " + this);
-      }
-      return innerSetCredential(alias, credential);
-    } catch (KeyStoreException e) {
-      throw new IOException("Problem looking up credential " + alias + " in " + this,
-          e);
-    } finally {
-      writeLock.unlock();
-    }
+  protected boolean keystoreExists() throws IOException {
+    return fs.exists(getPath());
   }
 
   @Override
-  public void deleteCredentialEntry(String name) throws IOException {
-    writeLock.lock();
-    try {
-      try {
-        if (keyStore.containsAlias(name)) {
-          keyStore.deleteEntry(name);
-        }
-        else {
-          throw new IOException("Credential " + name + " does not exist in " + this);
-        }
-      } catch (KeyStoreException e) {
-        throw new IOException("Problem removing " + name + " from " +
-            this, e);
-      }
-      cache.remove(name);
-      changed = true;
-    }
-    finally {
-      writeLock.unlock();
-    }
+  protected InputStream getInputStreamForFile() throws IOException {
+    return fs.open(getPath());
   }
 
-  CredentialEntry innerSetCredential(String alias, char[] material)
-      throws IOException {
-    writeLock.lock();
-    try {
-      keyStore.setKeyEntry(alias, new SecretKeySpec(
-          new String(material).getBytes("UTF-8"), "AES"),
-          password, null);
-    } catch (KeyStoreException e) {
-      throw new IOException("Can't store credential " + alias + " in " + this,
-          e);
-    } finally {
-      writeLock.unlock();
-    }
-    changed = true;
-    return new CredentialEntry(alias, material);
+  @Override
+  protected void createPermissions(String perms) {
+    permissions = new FsPermission(perms);
   }
 
   @Override
-  public void flush() throws IOException {
-    writeLock.lock();
-    try {
-      if (!changed) {
-        return;
-      }
-      // write out the keystore
-      try (FSDataOutputStream out = FileSystem.create(fs, path, permissions)) {
-        keyStore.store(out, password);
-      } catch (KeyStoreException e) {
-        throw new IOException("Can't store keystore " + this, e);
-      } catch (NoSuchAlgorithmException e) {
-        throw new IOException("No such algorithm storing keystore " + this, e);
-      } catch (CertificateException e) {
-        throw new IOException("Certificate exception storing keystore " + this,
-            e);
-      }
-      changed = false;
-    }
-    finally {
-      writeLock.unlock();
-    }
+  protected void stashOriginalFilePermissions() throws IOException {
+    // save off permissions in case we need to
+    // rewrite the keystore in flush()
+    FileStatus s = fs.getFileStatus(getPath());
+    permissions = s.getPermission();
   }
 
-  @Override
-  public String toString() {
-    return uri.toString();
+  protected void initFileSystem(URI uri, Configuration conf)
+      throws IOException {
+    super.initFileSystem(uri, conf);
+    fs = getPath().getFileSystem(conf);
   }
 
   /**
@@ -286,7 +95,7 @@ public class JavaKeyStoreProvider extends CredentialProvider {
   public static class Factory extends CredentialProviderFactory {
     @Override
     public CredentialProvider createProvider(URI providerName,
-                                             Configuration conf) throws IOException {
+        Configuration conf) throws IOException {
       if (SCHEME_NAME.equals(providerName.getScheme())) {
         return new JavaKeyStoreProvider(providerName, conf);
       }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3fcb33b3/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java
new file mode 100644
index 0000000..3840979
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/alias/LocalJavaKeyStoreProvider.java
@@ -0,0 +1,192 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.security.alias;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.util.Shell;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.nio.file.attribute.PosixFilePermission;
+import java.nio.file.attribute.PosixFilePermissions;
+import java.util.Set;
+import java.util.StringTokenizer;
+import java.util.EnumSet;
+
+/**
+ * CredentialProvider based on Java's KeyStore file format. The file may be
+ * stored only on the local filesystem using the following name mangling:
+ * localjceks://file/home/larry/creds.jceks -> file:///home/larry/creds.jceks
+ */
+@InterfaceAudience.Private
+public final class LocalJavaKeyStoreProvider extends
+    AbstractJavaKeyStoreProvider {
+  public static final String SCHEME_NAME = "localjceks";
+  private File file;
+  private Set<PosixFilePermission> permissions;
+
+  private LocalJavaKeyStoreProvider(URI uri, Configuration conf)
+      throws IOException {
+    super(uri, conf);
+  }
+
+  @Override
+  protected String getSchemeName() {
+    return SCHEME_NAME;
+  }
+
+  @Override
+  protected OutputStream getOutputStreamForKeystore() throws IOException {
+    FileOutputStream out = new FileOutputStream(file);
+    return out;
+  }
+
+  @Override
+  protected boolean keystoreExists() throws IOException {
+    return file.exists();
+  }
+
+  @Override
+  protected InputStream getInputStreamForFile() throws IOException {
+    FileInputStream is = new FileInputStream(file);
+    return is;
+  }
+
+  @Override
+  protected void createPermissions(String perms) throws IOException {
+    int mode = 700;
+    try {
+      mode = Integer.parseInt(perms, 8);
+    } catch (NumberFormatException nfe) {
+      throw new IOException("Invalid permissions mode provided while " +
+          "trying to createPermissions", nfe);
+    }
+    permissions = modeToPosixFilePermission(mode);
+  }
+
+  @Override
+  protected void stashOriginalFilePermissions() throws IOException {
+    // save off permissions in case we need to
+    // rewrite the keystore in flush()
+    if (!Shell.WINDOWS) {
+      Path path = Paths.get(file.getCanonicalPath());
+      permissions = Files.getPosixFilePermissions(path);
+    } else {
+      // On Windows, the JDK does not support the POSIX file permission APIs.
+      // Instead, we can do a winutils call and translate.
+      String[] cmd = Shell.getGetPermissionCommand();
+      String[] args = new String[cmd.length + 1];
+      System.arraycopy(cmd, 0, args, 0, cmd.length);
+      args[cmd.length] = file.getCanonicalPath();
+      String out = Shell.execCommand(args);
+      StringTokenizer t = new StringTokenizer(out, Shell.TOKEN_SEPARATOR_REGEX);
+      // The winutils output consists of 10 characters because of the leading
+      // directory indicator, i.e. "drwx------". The JDK parsing method expects
+      // a 9-character string, so remove the leading character.
+      String permString = t.nextToken().substring(1);
+      permissions = PosixFilePermissions.fromString(permString);
+    }
+  }
+
+  @Override
+  protected void initFileSystem(URI uri, Configuration conf)
+      throws IOException {
+    super.initFileSystem(uri, conf);
+    try {
+      file = new File(new URI(getPath().toString()));
+    } catch (URISyntaxException e) {
+      throw new IOException(e);
+    }
+  }
+
+  @Override
+  public void flush() throws IOException {
+    super.flush();
+    if (!Shell.WINDOWS) {
+      Files.setPosixFilePermissions(Paths.get(file.getCanonicalPath()),
+          permissions);
+    } else {
+      // FsPermission expects a 10-character string because of the leading
+      // directory indicator, i.e. "drwx------". The JDK toString method returns
+      // a 9-character string, so prepend a leading character.
+      FsPermission fsPermission = FsPermission.valueOf(
+          "-" + PosixFilePermissions.toString(permissions));
+      FileUtil.setPermission(file, fsPermission);
+    }
+  }
+
+  /**
+   * The factory to create JksProviders, which is used by the ServiceLoader.
+   */
+  public static class Factory extends CredentialProviderFactory {
+    @Override
+    public CredentialProvider createProvider(URI providerName,
+        Configuration conf) throws IOException {
+      if (SCHEME_NAME.equals(providerName.getScheme())) {
+        return new LocalJavaKeyStoreProvider(providerName, conf);
+      }
+      return null;
+    }
+  }
+
+  private static Set<PosixFilePermission> modeToPosixFilePermission(
+      int mode) {
+    Set<PosixFilePermission> perms = EnumSet.noneOf(PosixFilePermission.class);
+    if ((mode & 0001) != 0) {
+      perms.add(PosixFilePermission.OTHERS_EXECUTE);
+    }
+    if ((mode & 0002) != 0) {
+      perms.add(PosixFilePermission.OTHERS_WRITE);
+    }
+    if ((mode & 0004) != 0) {
+      perms.add(PosixFilePermission.OTHERS_READ);
+    }
+    if ((mode & 0010) != 0) {
+      perms.add(PosixFilePermission.GROUP_EXECUTE);
+    }
+    if ((mode & 0020) != 0) {
+      perms.add(PosixFilePermission.GROUP_WRITE);
+    }
+    if ((mode & 0040) != 0) {
+      perms.add(PosixFilePermission.GROUP_READ);
+    }
+    if ((mode & 0100) != 0) {
+      perms.add(PosixFilePermission.OWNER_EXECUTE);
+    }
+    if ((mode & 0200) != 0) {
+      perms.add(PosixFilePermission.OWNER_WRITE);
+    }
+    if ((mode & 0400) != 0) {
+      perms.add(PosixFilePermission.OWNER_READ);
+    }
+    return perms;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3fcb33b3/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.security.alias.CredentialProviderFactory
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.security.alias.CredentialProviderFactory b/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.security.alias.CredentialProviderFactory
index bb7366e..f673cf4 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.security.alias.CredentialProviderFactory
+++ b/hadoop-common-project/hadoop-common/src/main/resources/META-INF/services/org.apache.hadoop.security.alias.CredentialProviderFactory
@@ -14,4 +14,5 @@
 # limitations under the License.
 org.apache.hadoop.security.alias.JavaKeyStoreProvider$Factory
+org.apache.hadoop.security.alias.LocalJavaKeyStoreProvider$Factory
 org.apache.hadoop.security.alias.UserProvider$Factory

http://git-wip-us.apache.org/repos/asf/hadoop/blob/3fcb33b3/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
index 1eb1e8b..16cb0be 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/alias/TestCredentialProviderFactory.java
@@ -207,7 +207,29 @@ public class TestCredentialProviderFactory {
     checkPermissionRetention(conf, ourUrl, path);
   }
 
-  public void checkPermissionRetention(Configuration conf, String ourUrl,
+  @Test
+  public void testLocalJksProvider() throws Exception {
+    Configuration conf = new Configuration();
+    final Path jksPath = new Path(tmpDir.toString(), "test.jks");
+    final String ourUrl =
+        LocalJavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri();
+
+    File file = new File(tmpDir, "test.jks");
+    file.delete();
+    conf.set(CredentialProviderFactory.CREDENTIAL_PROVIDER_PATH, ourUrl);
+    checkSpecificProvider(conf, ourUrl);
+    Path path = ProviderUtils.unnestUri(new URI(ourUrl));
+    FileSystem fs = path.getFileSystem(conf);
+    FileStatus s = fs.getFileStatus(path);
+    assertTrue("Unexpected permissions: " + s.getPermission().toString(), s.getPermission().toString().equals("rwx------"));
+    assertTrue(file + " should exist", file.isFile());
+
+    // check permission retention after explicit change
+    fs.setPermission(path, new FsPermission("777"));
+    checkPermissionRetention(conf, ourUrl, path);
+  }
+
+  public void checkPermissionRetention(Configuration conf, String ourUrl,
       Path path) throws Exception {
     CredentialProvider provider = CredentialProviderFactory.getProviders(conf).get(0);
     // let's add a new credential and flush and check that permissions are still set to 777
@@ -233,3 +255,4 @@ public class TestCredentialProviderFactory {
         "keystore.", s.getPermission().toString().equals("rwxrwxrwx"));
   }
 }
+
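As the comments in LocalJavaKeyStoreProvider above note, the provider has to translate between the JDK's 9-character PosixFilePermission string ("rwx------") and the 10-character ls-style form that Hadoop's FsPermission.valueOf() expects. A small self-contained demonstration of that round trip, with an illustrative class name:

import java.nio.file.attribute.PosixFilePermission;
import java.nio.file.attribute.PosixFilePermissions;
import java.util.Set;
import org.apache.hadoop.fs.permission.FsPermission;

public class PermissionStringDemo {
  public static void main(String[] args) {
    // JDK side: 9-character rwx string, no leading file-type character.
    Set<PosixFilePermission> perms = PosixFilePermissions.fromString("rwx------");

    // Hadoop side: FsPermission.valueOf() parses the 10-character ls-style
    // form, so a leading '-' (regular file) is prepended, as flush() does above.
    FsPermission fsPermission =
        FsPermission.valueOf("-" + PosixFilePermissions.toString(perms));

    System.out.println(fsPermission);  // prints rwx------
  }
}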