Author: umamahesh
Date: Thu May 15 13:03:22 2014
New Revision: 1594906

URL: http://svn.apache.org/r1594906
Log:
Merge from trunk to HDFS-2006
Added:
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyServers.java
      - copied unchanged from r1594886, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyServers.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyServers.java
      - copied unchanged from r1594886, hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyServers.java
Modified:
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java
    hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java

Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1594906&r1=1594905&r2=1594906&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/CHANGES.txt Thu May 15 13:03:22 2014
@@ -326,6 +326,8 @@ Trunk (Unreleased)
     HADOOP-10431. Change visibility of KeyStore.Options getter methods to public. (tucu)

+    HADOOP-10583. bin/hadoop key throws NPE with no args and assorted other fixups. (clamb via tucu)
+
   OPTIMIZATIONS

     HADOOP-7761. Improve the performance of raw comparisons. (todd)

@@ -380,6 +382,12 @@ Release 2.5.0 - UNRELEASED

    HADOOP-10158.
SPNEGO should work with multiple interfaces/SPNs. (daryn via kihwal) + HADOOP-10566. Refactor proxyservers out of ProxyUsers. + (Benoy Antony via suresh) + + HADOOP-10572. Example NFS mount command must pass noacl as it isn't + supported by the server yet. (Harsh J via brandonli) + OPTIMIZATIONS BUG FIXES @@ -465,6 +473,12 @@ Release 2.5.0 - UNRELEASED because groups stored in Set and ArrayList are compared. (Mit Desai via kihwal) + HADOOP-10585. Retry polices ignore interrupted exceptions (Daryn Sharp via + jeagles) + + HADOOP-10401. ShellBasedUnixGroupsMapping#getGroups does not always return + primary group first (Akira AJISAKA via Colin Patrick McCabe) + Release 2.4.1 - UNRELEASED INCOMPATIBLE CHANGES Propchange: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/CHANGES.txt ------------------------------------------------------------------------------ Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt:r1593928-1594886 Propchange: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/ ------------------------------------------------------------------------------ Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java:r1593928-1594886 Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java?rev=1594906&r1=1594905&r2=1594906&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java (original) +++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java Thu May 15 13:03:22 2014 @@ -27,9 +27,7 @@ import java.net.URI; import java.security.NoSuchAlgorithmException; import java.text.MessageFormat; import java.util.Date; -import java.util.LinkedHashMap; import java.util.List; -import java.util.Map; import com.google.gson.stream.JsonReader; import com.google.gson.stream.JsonWriter; @@ -176,22 +174,26 @@ public abstract class KeyProvider { protected byte[] serialize() throws IOException { ByteArrayOutputStream buffer = new ByteArrayOutputStream(); JsonWriter writer = new JsonWriter(new OutputStreamWriter(buffer)); - writer.beginObject(); - if (cipher != null) { - writer.name(CIPHER_FIELD).value(cipher); - } - if (bitLength != 0) { - writer.name(BIT_LENGTH_FIELD).value(bitLength); - } - if (created != null) { - writer.name(CREATED_FIELD).value(created.getTime()); - } - if (description != null) { - writer.name(DESCRIPTION_FIELD).value(description); + try { + writer.beginObject(); + if (cipher != null) { + writer.name(CIPHER_FIELD).value(cipher); + } + if (bitLength != 0) { + writer.name(BIT_LENGTH_FIELD).value(bitLength); + } + if (created != null) { + writer.name(CREATED_FIELD).value(created.getTime()); + } + if (description != null) { + writer.name(DESCRIPTION_FIELD).value(description); + } + writer.name(VERSIONS_FIELD).value(versions); + writer.endObject(); + writer.flush(); + } finally { + writer.close(); } - writer.name(VERSIONS_FIELD).value(versions); - writer.endObject(); - writer.flush(); return buffer.toByteArray(); } @@ -207,23 +209,27 @@ public abstract class KeyProvider { int versions = 0; String description = null; 
JsonReader reader = new JsonReader(new InputStreamReader - (new ByteArrayInputStream(bytes))); - reader.beginObject(); - while (reader.hasNext()) { - String field = reader.nextName(); - if (CIPHER_FIELD.equals(field)) { - cipher = reader.nextString(); - } else if (BIT_LENGTH_FIELD.equals(field)) { - bitLength = reader.nextInt(); - } else if (CREATED_FIELD.equals(field)) { - created = new Date(reader.nextLong()); - } else if (VERSIONS_FIELD.equals(field)) { - versions = reader.nextInt(); - } else if (DESCRIPTION_FIELD.equals(field)) { - description = reader.nextString(); + (new ByteArrayInputStream(bytes))); + try { + reader.beginObject(); + while (reader.hasNext()) { + String field = reader.nextName(); + if (CIPHER_FIELD.equals(field)) { + cipher = reader.nextString(); + } else if (BIT_LENGTH_FIELD.equals(field)) { + bitLength = reader.nextInt(); + } else if (CREATED_FIELD.equals(field)) { + created = new Date(reader.nextLong()); + } else if (VERSIONS_FIELD.equals(field)) { + versions = reader.nextInt(); + } else if (DESCRIPTION_FIELD.equals(field)) { + description = reader.nextString(); + } } + reader.endObject(); + } finally { + reader.close(); } - reader.endObject(); this.cipher = cipher; this.bitLength = bitLength; this.created = created; @@ -310,7 +316,6 @@ public abstract class KeyProvider { */ public abstract List<String> getKeys() throws IOException; - /** * Get key metadata in bulk. * @param names the names of the keys to get Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java?rev=1594906&r1=1594905&r2=1594906&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java (original) +++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java Thu May 15 13:03:22 2014 @@ -23,9 +23,6 @@ import java.io.PrintStream; import java.security.InvalidParameterException; import java.security.NoSuchAlgorithmException; import java.util.List; -import java.util.Map; - -import javax.crypto.KeyGenerator; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; @@ -93,41 +90,54 @@ public class KeyShell extends Configured */ private int init(String[] args) throws IOException { for (int i = 0; i < args.length; i++) { // parse command line + boolean moreTokens = (i < args.length - 1); if (args[i].equals("create")) { - String keyName = args[++i]; + String keyName = "--help"; + if (moreTokens) { + keyName = args[++i]; + } + command = new CreateCommand(keyName); - if (keyName.equals("--help")) { + if ("--help".equals(keyName)) { printKeyShellUsage(); return -1; } } else if (args[i].equals("delete")) { - String keyName = args[++i]; + String keyName = "--help"; + if (moreTokens) { + keyName = args[++i]; + } + command = new DeleteCommand(keyName); - if (keyName.equals("--help")) { + if ("--help".equals(keyName)) { printKeyShellUsage(); return -1; } } else if (args[i].equals("roll")) { - String keyName = args[++i]; + String keyName = "--help"; + if (moreTokens) { + keyName = args[++i]; + } + command = new RollCommand(keyName); - if (keyName.equals("--help")) { + if ("--help".equals(keyName)) { printKeyShellUsage(); 
return -1; } - } else if (args[i].equals("list")) { + } else if ("list".equals(args[i])) { command = new ListCommand(); - } else if (args[i].equals("--size")) { + } else if ("--size".equals(args[i]) && moreTokens) { getConf().set(KeyProvider.DEFAULT_BITLENGTH_NAME, args[++i]); - } else if (args[i].equals("--cipher")) { + } else if ("--cipher".equals(args[i]) && moreTokens) { getConf().set(KeyProvider.DEFAULT_CIPHER_NAME, args[++i]); - } else if (args[i].equals("--provider")) { + } else if ("--provider".equals(args[i]) && moreTokens) { userSuppliedProvider = true; getConf().set(KeyProviderFactory.KEY_PROVIDER_PATH, args[++i]); - } else if (args[i].equals("--metadata")) { + } else if ("--metadata".equals(args[i])) { getConf().setBoolean(LIST_METADATA, true); - } else if (args[i].equals("-i") || (args[i].equals("--interactive"))) { + } else if ("-i".equals(args[i]) || ("--interactive".equals(args[i]))) { interactive = true; - } else if (args[i].equals("--help")) { + } else if ("--help".equals(args[i])) { printKeyShellUsage(); return -1; } else { @@ -136,6 +146,12 @@ public class KeyShell extends Configured return -1; } } + + if (command == null) { + printKeyShellUsage(); + return -1; + } + return 0; } @@ -143,8 +159,7 @@ public class KeyShell extends Configured out.println(USAGE_PREFIX + COMMANDS); if (command != null) { out.println(command.getUsage()); - } - else { + } else { out.println("=========================================================" + "======"); out.println(CreateCommand.USAGE + ":\n\n" + CreateCommand.DESC); @@ -174,8 +189,7 @@ public class KeyShell extends Configured providers = KeyProviderFactory.getProviders(getConf()); if (userSuppliedProvider) { provider = providers.get(0); - } - else { + } else { for (KeyProvider p : providers) { if (!p.isTransient()) { provider = p; @@ -190,7 +204,7 @@ public class KeyShell extends Configured } protected void printProviderWritten() { - out.println(provider.getClass().getName() + " has been updated."); + out.println(provider + " has been updated."); } protected void warnIfTransientProvider() { @@ -206,12 +220,12 @@ public class KeyShell extends Configured private class ListCommand extends Command { public static final String USAGE = - "list [--provider] [--metadata] [--help]"; + "list [--provider <provider>] [--metadata] [--help]"; public static final String DESC = - "The list subcommand displays the keynames contained within \n" + - "a particular provider - as configured in core-site.xml or " + - "indicated\nthrough the --provider argument.\n" + - "If the --metadata option is used, the keys metadata will be printed"; + "The list subcommand displays the keynames contained within\n" + + "a particular provider as configured in core-site.xml or\n" + + "specified with the --provider argument. --metadata displays\n" + + "the metadata."; private boolean metadata = false; @@ -220,9 +234,9 @@ public class KeyShell extends Configured provider = getKeyProvider(); if (provider == null) { out.println("There are no non-transient KeyProviders configured.\n" - + "Consider using the --provider option to indicate the provider\n" - + "to use. If you want to list a transient provider then you\n" - + "you MUST use the --provider argument."); + + "Use the --provider option to specify a provider. 
If you\n" + + "want to list a transient provider then you must use the\n" + + "--provider argument."); rc = false; } metadata = getConf().getBoolean(LIST_METADATA, false); @@ -231,12 +245,12 @@ public class KeyShell extends Configured public void execute() throws IOException { try { - List<String> keys = provider.getKeys(); - out.println("Listing keys for KeyProvider: " + provider.toString()); + final List<String> keys = provider.getKeys(); + out.println("Listing keys for KeyProvider: " + provider); if (metadata) { - Metadata[] meta = + final Metadata[] meta = provider.getKeysMetadata(keys.toArray(new String[keys.size()])); - for(int i=0; i < meta.length; ++i) { + for (int i = 0; i < meta.length; ++i) { out.println(keys.get(i) + " : " + meta[i]); } } else { @@ -245,7 +259,7 @@ public class KeyShell extends Configured } } } catch (IOException e) { - out.println("Cannot list keys for KeyProvider: " + provider.toString() + out.println("Cannot list keys for KeyProvider: " + provider + ": " + e.getMessage()); throw e; } @@ -258,11 +272,10 @@ public class KeyShell extends Configured } private class RollCommand extends Command { - public static final String USAGE = "roll <keyname> [--provider] [--help]"; + public static final String USAGE = "roll <keyname> [--provider <provider>] [--help]"; public static final String DESC = - "The roll subcommand creates a new version of the key specified\n" + - "through the <keyname> argument within the provider indicated using\n" + - "the --provider argument"; + "The roll subcommand creates a new version for the specified key\n" + + "within the provider indicated using the --provider argument\n"; String keyName = null; @@ -274,15 +287,14 @@ public class KeyShell extends Configured boolean rc = true; provider = getKeyProvider(); if (provider == null) { - out.println("There are no valid KeyProviders configured.\n" - + "Key will not be rolled.\n" - + "Consider using the --provider option to indicate the provider" - + " to use."); + out.println("There are no valid KeyProviders configured. The key\n" + + "has not been rolled. Use the --provider option to specify\n" + + "a provider."); rc = false; } if (keyName == null) { - out.println("There is no keyName specified. Please provide the" + - "mandatory <keyname>. 
See the usage description with --help."); + out.println("Please provide a <keyname>.\n" + + "See the usage description by using --help."); rc = false; } return rc; @@ -290,10 +302,9 @@ public class KeyShell extends Configured public void execute() throws NoSuchAlgorithmException, IOException { try { - Metadata md = provider.getMetadata(keyName); warnIfTransientProvider(); out.println("Rolling key version from KeyProvider: " - + provider.toString() + " for key name: " + keyName); + + provider + "\n for key name: " + keyName); try { provider.rollNewVersion(keyName); out.println(keyName + " has been successfully rolled."); @@ -301,12 +312,12 @@ public class KeyShell extends Configured printProviderWritten(); } catch (NoSuchAlgorithmException e) { out.println("Cannot roll key: " + keyName + " within KeyProvider: " - + provider.toString()); + + provider); throw e; } } catch (IOException e1) { out.println("Cannot roll key: " + keyName + " within KeyProvider: " - + provider.toString()); + + provider); throw e1; } } @@ -318,11 +329,11 @@ public class KeyShell extends Configured } private class DeleteCommand extends Command { - public static final String USAGE = "delete <keyname> [--provider] [--help]"; + public static final String USAGE = "delete <keyname> [--provider <provider>] [--help]"; public static final String DESC = - "The delete subcommand deletes all of the versions of the key\n" + - "specified as the <keyname> argument from within the provider\n" + - "indicated through the --provider argument"; + "The delete subcommand deletes all versions of the key\n" + + "specified by the <keyname> argument from within the\n" + + "provider specified --provider."; String keyName = null; boolean cont = true; @@ -335,23 +346,21 @@ public class KeyShell extends Configured public boolean validate() { provider = getKeyProvider(); if (provider == null) { - out.println("There are no valid KeyProviders configured.\n" - + "Nothing will be deleted.\n" - + "Consider using the --provider option to indicate the provider" - + " to use."); + out.println("There are no valid KeyProviders configured. Nothing\n" + + "was deleted. Use the --provider option to specify a provider."); return false; } if (keyName == null) { - out.println("There is no keyName specified. Please provide the" + - "mandatory <keyname>. See the usage description with --help."); + out.println("There is no keyName specified. Please specify a " + + "<keyname>. See the usage description with --help."); return false; } if (interactive) { try { cont = ToolRunner .confirmPrompt("You are about to DELETE all versions of " - + "the key: " + keyName + " from KeyProvider " - + provider.toString() + ". Continue?:"); + + " key: " + keyName + " from KeyProvider " + + provider + ". 
Continue?:"); if (!cont) { out.println("Nothing has been be deleted."); } @@ -367,7 +376,7 @@ public class KeyShell extends Configured public void execute() throws IOException { warnIfTransientProvider(); out.println("Deleting key: " + keyName + " from KeyProvider: " - + provider.toString()); + + provider); if (cont) { try { provider.deleteKey(keyName); @@ -375,7 +384,7 @@ public class KeyShell extends Configured provider.flush(); printProviderWritten(); } catch (IOException e) { - out.println(keyName + "has NOT been deleted."); + out.println(keyName + " has not been deleted."); throw e; } } @@ -388,16 +397,16 @@ public class KeyShell extends Configured } private class CreateCommand extends Command { - public static final String USAGE = "create <keyname> [--cipher] " + - "[--size] [--provider] [--help]"; + public static final String USAGE = + "create <keyname> [--cipher <cipher>] [--size <size>]\n" + + " [--provider <provider>] [--help]"; public static final String DESC = - "The create subcommand creates a new key for the name specified\n" + - "as the <keyname> argument within the provider indicated through\n" + - "the --provider argument. You may also indicate the specific\n" + - "cipher through the --cipher argument. The default for cipher is\n" + - "currently \"AES/CTR/NoPadding\". The default keysize is \"256\".\n" + - "You may also indicate the requested key length through the --size\n" + - "argument."; + "The create subcommand creates a new key for the name specified\n" + + "by the <keyname> argument within the provider specified by the\n" + + "--provider argument. You may specify a cipher with the --cipher\n" + + "argument. The default cipher is currently \"AES/CTR/NoPadding\".\n" + + "The default keysize is 256. You may specify the requested key\n" + + "length using the --size argument.\n"; String keyName = null; @@ -409,15 +418,14 @@ public class KeyShell extends Configured boolean rc = true; provider = getKeyProvider(); if (provider == null) { - out.println("There are no valid KeyProviders configured.\nKey" + - " will not be created.\n" - + "Consider using the --provider option to indicate the provider" + - " to use."); + out.println("There are no valid KeyProviders configured. No key\n" + + " was created. You can use the --provider option to specify\n" + + " a provider to use."); rc = false; } if (keyName == null) { - out.println("There is no keyName specified. Please provide the" + - "mandatory <keyname>. See the usage description with --help."); + out.println("Please provide a <keyname>. See the usage description" + + " with --help."); rc = false; } return rc; @@ -432,13 +440,13 @@ public class KeyShell extends Configured provider.flush(); printProviderWritten(); } catch (InvalidParameterException e) { - out.println(keyName + " has NOT been created. " + e.getMessage()); + out.println(keyName + " has not been created. " + e.getMessage()); throw e; } catch (IOException e) { - out.println(keyName + " has NOT been created. " + e.getMessage()); + out.println(keyName + " has not been created. " + e.getMessage()); throw e; } catch (NoSuchAlgorithmException e) { - out.println(keyName + " has NOT been created. " + e.getMessage()); + out.println(keyName + " has not been created. 
" + e.getMessage()); throw e; } } Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java?rev=1594906&r1=1594905&r2=1594906&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java (original) +++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java Thu May 15 13:03:22 2014 @@ -126,7 +126,6 @@ public class KMSClientProvider extends K return o; } - public static String checkNotEmpty(String s, String name) throws IllegalArgumentException { checkNotNull(s, name); @@ -140,6 +139,13 @@ public class KMSClientProvider extends K private String kmsUrl; private SSLFactory sslFactory; + @Override + public String toString() { + final StringBuilder sb = new StringBuilder("KMSClientProvider["); + sb.append(kmsUrl).append("]"); + return sb.toString(); + } + public KMSClientProvider(URI uri, Configuration conf) throws IOException { Path path = unnestUri(uri); URL url = path.toUri().toURL(); @@ -515,5 +521,4 @@ public class KMSClientProvider extends K public static String buildVersionName(String name, int version) { return KeyProvider.buildVersionName(name, version); } - } Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java?rev=1594906&r1=1594905&r2=1594906&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java (original) +++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java Thu May 15 13:03:22 2014 @@ -158,6 +158,17 @@ public class FsPermission implements Wri return (short)s; } + /** + * Encodes the object to a short. Unlike {@link #toShort()}, this method may + * return values outside the fixed range 00000 - 01777 if extended features + * are encoded into this permission, such as the ACL bit. + * + * @return short extended short representation of this permission + */ + public short toExtendedShort() { + return toShort(); + } + @Override public boolean equals(Object obj) { if (obj instanceof FsPermission) { @@ -273,6 +284,16 @@ public class FsPermission implements Wri return stickyBit; } + /** + * Returns true if there is also an ACL (access control list). + * + * @return boolean true if there is also an ACL (access control list). + */ + public boolean getAclBit() { + // File system subclasses that support the ACL bit would override this. 
+ return false; + } + /** Set the user file creation mask (umask) */ public static void setUMask(Configuration conf, FsPermission umask) { conf.set(UMASK_LABEL, String.format("%1$03o", umask.toShort())); Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java?rev=1594906&r1=1594905&r2=1594906&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java (original) +++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/AclCommands.java Thu May 15 13:03:22 2014 @@ -75,29 +75,21 @@ class AclCommands extends FsCommand { @Override protected void processPath(PathData item) throws IOException { - AclStatus aclStatus = item.fs.getAclStatus(item.path); out.println("# file: " + item); - out.println("# owner: " + aclStatus.getOwner()); - out.println("# group: " + aclStatus.getGroup()); - List<AclEntry> entries = aclStatus.getEntries(); - if (aclStatus.isStickyBit()) { - String stickyFlag = "T"; - for (AclEntry aclEntry : entries) { - if (aclEntry.getType() == AclEntryType.OTHER - && aclEntry.getScope() == AclEntryScope.ACCESS - && aclEntry.getPermission().implies(FsAction.EXECUTE)) { - stickyFlag = "t"; - break; - } - } - out.println("# flags: --" + stickyFlag); + out.println("# owner: " + item.stat.getOwner()); + out.println("# group: " + item.stat.getGroup()); + FsPermission perm = item.stat.getPermission(); + if (perm.getStickyBit()) { + out.println("# flags: --" + + (perm.getOtherAction().implies(FsAction.EXECUTE) ? "t" : "T")); } - FsPermission perm = item.stat.getPermission(); - if (entries.isEmpty()) { - printMinimalAcl(perm); - } else { + if (perm.getAclBit()) { + AclStatus aclStatus = item.fs.getAclStatus(item.path); + List<AclEntry> entries = aclStatus.getEntries(); printExtendedAcl(perm, entries); + } else { + printMinimalAcl(perm); } out.println(); Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java?rev=1594906&r1=1594905&r2=1594906&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java (original) +++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Ls.java Thu May 15 13:03:22 2014 @@ -31,8 +31,6 @@ import org.apache.hadoop.classification. import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.ipc.RemoteException; -import org.apache.hadoop.ipc.RpcNoSuchMethodException; import com.google.common.collect.Sets; @@ -116,7 +114,7 @@ class Ls extends FsCommand { FileStatus stat = item.stat; String line = String.format(lineFormat, (stat.isDirectory() ? "d" : "-"), - stat.getPermission() + (hasAcl(item) ? "+" : " "), + stat.getPermission() + (stat.getPermission().getAclBit() ? "+" : " "), (stat.isFile() ? 
stat.getReplication() : "-"), stat.getOwner(), stat.getGroup(), @@ -153,49 +151,6 @@ class Ls extends FsCommand { lineFormat = fmt.toString(); } - /** - * Calls getAclStatus to determine if the given item has an ACL. For - * compatibility, this method traps errors caused by the RPC method missing - * from the server side. This would happen if the client was connected to an - * old NameNode that didn't have the ACL APIs. This method also traps the - * case of the client-side FileSystem not implementing the ACL APIs. - * FileSystem instances that do not support ACLs are remembered. This - * prevents the client from sending multiple failing RPC calls during a - * recursive ls. - * - * @param item PathData item to check - * @return boolean true if item has an ACL - * @throws IOException if there is a failure - */ - private boolean hasAcl(PathData item) throws IOException { - FileSystem fs = item.fs; - if (aclNotSupportedFsSet.contains(fs.getUri())) { - // This FileSystem failed to run the ACL API in an earlier iteration. - return false; - } - try { - return !fs.getAclStatus(item.path).getEntries().isEmpty(); - } catch (RemoteException e) { - // If this is a RpcNoSuchMethodException, then the client is connected to - // an older NameNode that doesn't support ACLs. Keep going. - IOException e2 = e.unwrapRemoteException(RpcNoSuchMethodException.class); - if (!(e2 instanceof RpcNoSuchMethodException)) { - throw e; - } - } catch (IOException e) { - // The NameNode supports ACLs, but they are not enabled. Keep going. - String message = e.getMessage(); - if (message != null && !message.contains("ACLs has been disabled")) { - throw e; - } - } catch (UnsupportedOperationException e) { - // The underlying FileSystem doesn't implement ACLs. Keep going. - } - // Remember that this FileSystem cannot support ACLs. - aclNotSupportedFsSet.add(fs.getUri()); - return false; - } - private int maxLength(int n, Object value) { return Math.max(n, (value != null) ? String.valueOf(value).length() : 0); } Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java?rev=1594906&r1=1594905&r2=1594906&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java (original) +++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java Thu May 15 13:03:22 2014 @@ -20,6 +20,7 @@ package org.apache.hadoop.ha; import java.io.IOException; import java.io.PrintStream; import java.util.Arrays; +import java.util.Collection; import java.util.Map; import org.apache.commons.cli.Options; @@ -33,6 +34,7 @@ import org.apache.hadoop.classification. 
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.CommonConfigurationKeys; +import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; import org.apache.hadoop.ha.HAServiceProtocol.StateChangeRequestInfo; import org.apache.hadoop.ha.HAServiceProtocol.RequestSource; import org.apache.hadoop.util.Tool; @@ -66,7 +68,7 @@ public abstract class HAAdmin extends Co protected final static Map<String, UsageInfo> USAGE = ImmutableMap.<String, UsageInfo>builder() .put("-transitionToActive", - new UsageInfo("<serviceId>", "Transitions the service into Active state")) + new UsageInfo(" <serviceId> [--"+FORCEACTIVE+"]", "Transitions the service into Active state")) .put("-transitionToStandby", new UsageInfo("<serviceId>", "Transitions the service into Standby state")) .put("-failover", @@ -100,6 +102,10 @@ public abstract class HAAdmin extends Co } protected abstract HAServiceTarget resolveTarget(String string); + + protected Collection<String> getTargetIds(String targetNodeToActivate) { + return Arrays.asList(new String[]{targetNodeToActivate}); + } protected String getUsageString() { return "Usage: HAAdmin"; @@ -133,6 +139,11 @@ public abstract class HAAdmin extends Co printUsage(errOut, "-transitionToActive"); return -1; } + /* returns true if other target node is active or some exception occurred + and forceActive was not set */ + if(isOtherTargetNodeActive(argv[0], cmd.hasOption(FORCEACTIVE))) { + return -1; + } HAServiceTarget target = resolveTarget(argv[0]); if (!checkManualStateManagementOK(target)) { return -1; @@ -142,7 +153,48 @@ public abstract class HAAdmin extends Co HAServiceProtocolHelper.transitionToActive(proto, createReqInfo()); return 0; } - + + /** + * Checks whether other target node is active or not + * @param targetNodeToActivate + * @return true if other target node is active or some other exception + * occurred and forceActive was set otherwise false + * @throws IOException + */ + private boolean isOtherTargetNodeActive(String targetNodeToActivate, boolean forceActive) + throws IOException { + Collection<String> targetIds = getTargetIds(targetNodeToActivate); + if(targetIds == null) { + errOut.println("transitionToActive: No target node in the " + + "current configuration"); + printUsage(errOut, "-transitionToActive"); + return true; + } + targetIds.remove(targetNodeToActivate); + for(String targetId : targetIds) { + HAServiceTarget target = resolveTarget(targetId); + if (!checkManualStateManagementOK(target)) { + return true; + } + try { + HAServiceProtocol proto = target.getProxy(getConf(), 5000); + if(proto.getServiceStatus().getState() == HAServiceState.ACTIVE) { + errOut.println("transitionToActive: Node " + targetId +" is already active"); + printUsage(errOut, "-transitionToActive"); + return true; + } + } catch (Exception e) { + //If forceActive switch is false then return true + if(!forceActive) { + errOut.println("Unexpected error occurred " + e.getMessage()); + printUsage(errOut, "-transitionToActive"); + return true; + } + } + } + return false; + } + private int transitionToStandby(final CommandLine cmd) throws IOException, ServiceFailedException { String[] argv = cmd.getArgs(); @@ -364,6 +416,9 @@ public abstract class HAAdmin extends Co if ("-failover".equals(cmd)) { addFailoverCliOpts(opts); } + if("-transitionToActive".equals(cmd)) { + addTransitionToActiveCliOpts(opts); + } // Mutative commands take FORCEMANUAL option if ("-transitionToActive".equals(cmd) || "-transitionToStandby".equals(cmd) || 
@@ -433,6 +488,14 @@ public abstract class HAAdmin extends Co // that change state. } + /** + * Add CLI options which are specific to the transitionToActive command and + * no others. + */ + private void addTransitionToActiveCliOpts(Options transitionToActiveCliOpts) { + transitionToActiveCliOpts.addOption(FORCEACTIVE, false, "force active"); + } + private CommandLine parseOpts(String cmdName, Options opts, String[] argv) { try { // Strip off the first arg, since that's just the command name Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java?rev=1594906&r1=1594905&r2=1594906&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java (original) +++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryInvocationHandler.java Thu May 15 13:03:22 2014 @@ -150,7 +150,7 @@ public class RetryInvocationHandler<T> i } if (action.delayMillis > 0) { - ThreadUtil.sleepAtLeastIgnoreInterrupts(action.delayMillis); + Thread.sleep(action.delayMillis); } if (action.action == RetryAction.RetryDecision.FAILOVER_AND_RETRY) { Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java?rev=1594906&r1=1594905&r2=1594906&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java (original) +++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/ShellBasedUnixGroupsMapping.java Thu May 15 13:03:22 2014 @@ -74,7 +74,8 @@ public class ShellBasedUnixGroupsMapping * Get the current user's group list from Unix by running the command 'groups' * NOTE. For non-existing user it will return EMPTY list * @param user user name - * @return the groups list that the <code>user</code> belongs to + * @return the groups list that the <code>user</code> belongs to. The primary + * group is returned first. 
* @throws IOException if encounter any error when running the command */ private static List<String> getUnixGroups(final String user) throws IOException { @@ -84,6 +85,7 @@ public class ShellBasedUnixGroupsMapping } catch (ExitCodeException e) { // if we didn't get the group - just return empty list; LOG.warn("got exception trying to get groups for user " + user, e); + return new LinkedList<String>(); } StringTokenizer tokenizer = @@ -92,6 +94,17 @@ public class ShellBasedUnixGroupsMapping while (tokenizer.hasMoreTokens()) { groups.add(tokenizer.nextToken()); } + + // remove duplicated primary group + if (!Shell.WINDOWS) { + for (int i = 1; i < groups.size(); i++) { + if (groups.get(i).equals(groups.get(0))) { + groups.remove(i); + break; + } + } + } + return groups; } } Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java?rev=1594906&r1=1594905&r2=1594906&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java (original) +++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java Thu May 15 13:03:22 2014 @@ -19,12 +19,10 @@ package org.apache.hadoop.security.authorize; import java.net.InetAddress; -import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; -import java.util.HashSet; import java.util.Map; import java.util.Map.Entry; @@ -44,7 +42,6 @@ public class ProxyUsers { private static final String CONF_GROUPS = ".groups"; private static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser."; private static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\."; - public static final String CONF_HADOOP_PROXYSERVERS = "hadoop.proxyservers"; private static boolean init = false; //list of users, groups and hosts per proxyuser @@ -54,8 +51,6 @@ public class ProxyUsers { new HashMap<String, Collection<String>>(); private static Map<String, Collection<String>> proxyHosts = new HashMap<String, Collection<String>>(); - private static Collection<String> proxyServers = - new HashSet<String>(); /** * reread the conf and get new values for "hadoop.proxyuser.*.groups/users/hosts" @@ -75,7 +70,6 @@ public class ProxyUsers { proxyGroups.clear(); proxyHosts.clear(); proxyUsers.clear(); - proxyServers.clear(); // get all the new keys for users String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_USERS; @@ -103,22 +97,8 @@ public class ProxyUsers { proxyHosts.put(entry.getKey(), StringUtils.getTrimmedStringCollection(entry.getValue())); } - - // trusted proxy servers such as http proxies - for (String host : conf.getTrimmedStrings(CONF_HADOOP_PROXYSERVERS)) { - InetSocketAddress addr = new InetSocketAddress(host, 0); - if (!addr.isUnresolved()) { - proxyServers.add(addr.getAddress().getHostAddress()); - } - } init = true; - } - - public static synchronized boolean isProxyServer(String remoteAddr) { - if(!init) { - refreshSuperUserGroupsConfiguration(); - } - return proxyServers.contains(remoteAddr); + ProxyServers.refresh(conf); } /** Modified: 
hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java?rev=1594906&r1=1594905&r2=1594906&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java (original) +++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/Shell.java Thu May 15 13:03:22 2014 @@ -132,11 +132,17 @@ abstract public class Shell { : new String[]{"bash", "-c", "groups"}; } - /** a Unix command to get a given user's groups list */ + /** + * a Unix command to get a given user's groups list. + * If the OS is not WINDOWS, the command will get the user's primary group + * first and finally get the groups list which includes the primary group. + * i.e. the user's primary group will be included twice. + */ public static String[] getGroupsForUserCommand(final String user) { //'groups username' command return is non-consistent across different unixes return (WINDOWS)? new String[] { WINUTILS, "groups", "-F", "\"" + user + "\""} - : new String [] {"bash", "-c", "id -Gn " + user}; + : new String [] {"bash", "-c", "id -gn " + user + + "&& id -Gn " + user}; } /** a Unix command to get a given netgroup's user list */ Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java?rev=1594906&r1=1594905&r2=1594906&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java (original) +++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/crypto/key/TestKeyShell.java Thu May 15 13:03:22 2014 @@ -121,7 +121,7 @@ public class TestKeyShell { ks.setConf(new Configuration()); rc = ks.run(args1); assertEquals(-1, rc); - assertTrue(outContent.toString().contains("key1 has NOT been created.")); + assertTrue(outContent.toString().contains("key1 has not been created.")); } @Test @@ -134,7 +134,7 @@ public class TestKeyShell { ks.setConf(new Configuration()); rc = ks.run(args1); assertEquals(-1, rc); - assertTrue(outContent.toString().contains("key1 has NOT been created.")); + assertTrue(outContent.toString().contains("key1 has not been created.")); } @Test Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java?rev=1594906&r1=1594905&r2=1594906&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java (original) +++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/retry/TestRetryProxy.java Thu May 15 13:03:22 2014 @@ -26,27 +26,37 
@@ import static org.apache.hadoop.io.retry import static org.apache.hadoop.io.retry.RetryPolicies.retryUpToMaximumCountWithProportionalSleep; import static org.apache.hadoop.io.retry.RetryPolicies.retryUpToMaximumTimeWithFixedSleep; import static org.apache.hadoop.io.retry.RetryPolicies.exponentialBackoffRetry; +import static org.junit.Assert.*; import java.util.Collections; import java.util.Map; +import java.util.concurrent.Callable; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; - -import junit.framework.TestCase; +import java.util.concurrent.atomic.AtomicReference; import org.apache.hadoop.io.retry.UnreliableInterface.FatalException; import org.apache.hadoop.io.retry.UnreliableInterface.UnreliableException; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RemoteException; +import org.junit.Before; +import org.junit.Test; + +import java.lang.reflect.UndeclaredThrowableException; -public class TestRetryProxy extends TestCase { +public class TestRetryProxy { private UnreliableImplementation unreliableImpl; - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { unreliableImpl = new UnreliableImplementation(); } + @Test public void testTryOnceThenFail() throws UnreliableException { UnreliableInterface unreliable = (UnreliableInterface) RetryProxy.create(UnreliableInterface.class, unreliableImpl, TRY_ONCE_THEN_FAIL); @@ -62,6 +72,7 @@ public class TestRetryProxy extends Test /** * Test for {@link RetryInvocationHandler#isRpcInvocation(Object)} */ + @Test public void testRpcInvocation() throws Exception { // For a proxy method should return true final UnreliableInterface unreliable = (UnreliableInterface) @@ -91,6 +102,7 @@ public class TestRetryProxy extends Test assertFalse(RetryInvocationHandler.isRpcInvocation(new Object())); } + @Test public void testRetryForever() throws UnreliableException { UnreliableInterface unreliable = (UnreliableInterface) RetryProxy.create(UnreliableInterface.class, unreliableImpl, RETRY_FOREVER); @@ -99,6 +111,7 @@ public class TestRetryProxy extends Test unreliable.failsTenTimesThenSucceeds(); } + @Test public void testRetryUpToMaximumCountWithFixedSleep() throws UnreliableException { UnreliableInterface unreliable = (UnreliableInterface) RetryProxy.create(UnreliableInterface.class, unreliableImpl, @@ -113,6 +126,7 @@ public class TestRetryProxy extends Test } } + @Test public void testRetryUpToMaximumTimeWithFixedSleep() throws UnreliableException { UnreliableInterface unreliable = (UnreliableInterface) RetryProxy.create(UnreliableInterface.class, unreliableImpl, @@ -127,6 +141,7 @@ public class TestRetryProxy extends Test } } + @Test public void testRetryUpToMaximumCountWithProportionalSleep() throws UnreliableException { UnreliableInterface unreliable = (UnreliableInterface) RetryProxy.create(UnreliableInterface.class, unreliableImpl, @@ -141,6 +156,7 @@ public class TestRetryProxy extends Test } } + @Test public void testExponentialRetry() throws UnreliableException { UnreliableInterface unreliable = (UnreliableInterface) RetryProxy.create(UnreliableInterface.class, unreliableImpl, @@ -155,6 +171,7 @@ public class TestRetryProxy extends Test } } + @Test public void testRetryByException() throws UnreliableException { Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap = Collections.<Class<? 
extends Exception>, RetryPolicy>singletonMap(FatalException.class, TRY_ONCE_THEN_FAIL); @@ -171,6 +188,7 @@ public class TestRetryProxy extends Test } } + @Test public void testRetryByRemoteException() { Map<Class<? extends Exception>, RetryPolicy> exceptionToPolicyMap = Collections.<Class<? extends Exception>, RetryPolicy>singletonMap(FatalException.class, TRY_ONCE_THEN_FAIL); @@ -186,4 +204,35 @@ public class TestRetryProxy extends Test } } + @Test + public void testRetryInterruptible() throws Throwable { + final UnreliableInterface unreliable = (UnreliableInterface) + RetryProxy.create(UnreliableInterface.class, unreliableImpl, + retryUpToMaximumTimeWithFixedSleep(10, 10, TimeUnit.SECONDS)); + + final CountDownLatch latch = new CountDownLatch(1); + final AtomicReference<Thread> futureThread = new AtomicReference<Thread>(); + ExecutorService exec = Executors.newSingleThreadExecutor(); + Future<Throwable> future = exec.submit(new Callable<Throwable>(){ + @Override + public Throwable call() throws Exception { + futureThread.set(Thread.currentThread()); + latch.countDown(); + try { + unreliable.alwaysFailsWithFatalException(); + } catch (UndeclaredThrowableException ute) { + return ute.getCause(); + } + return null; + } + }); + latch.await(); + Thread.sleep(1000); // time to fail and sleep + assertTrue(futureThread.get().isAlive()); + futureThread.get().interrupt(); + Throwable e = future.get(1, TimeUnit.SECONDS); // should return immediately + assertNotNull(e); + assertEquals(InterruptedException.class, e.getClass()); + assertEquals("sleep interrupted", e.getMessage()); + } } Modified: hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java?rev=1594906&r1=1594905&r2=1594906&view=diff ============================================================================== --- hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java (original) +++ hadoop/common/branches/HDFS-2006/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java Thu May 15 13:03:22 2014 @@ -327,17 +327,6 @@ public class TestProxyUsers { assertEquals (1,hosts.size()); } - @Test - public void testProxyServer() { - Configuration conf = new Configuration(); - assertFalse(ProxyUsers.isProxyServer("1.1.1.1")); - conf.set(ProxyUsers.CONF_HADOOP_PROXYSERVERS, "2.2.2.2, 3.3.3.3"); - ProxyUsers.refreshSuperUserGroupsConfiguration(conf); - assertFalse(ProxyUsers.isProxyServer("1.1.1.1")); - assertTrue(ProxyUsers.isProxyServer("2.2.2.2")); - assertTrue(ProxyUsers.isProxyServer("3.3.3.3")); - } - private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) { try { ProxyUsers.authorize(proxyUgi, host);
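
For reference, a minimal sketch of the KeyShell behavior covered by HADOOP-10583 above, using the same Tool-style entry points as TestKeyShell (setConf/run). With no subcommand at all, or with a subcommand missing its <keyname>, init() now falls through to printKeyShellUsage() and run() returns -1 rather than failing with the NullPointerException reported in the JIRA. The class and argument values below are only an illustration, not part of this commit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.crypto.key.KeyShell;

    public class KeyShellUsageSketch {
      public static void main(String[] args) throws Exception {
        KeyShell ks = new KeyShell();
        ks.setConf(new Configuration());
        // No subcommand: the parse loop never runs, command stays null,
        // so usage is printed and -1 is returned.
        int rc = ks.run(new String[] {});
        // Subcommand without its <keyname>: keyName defaults to "--help",
        // usage is printed and -1 is returned.
        int rc2 = ks.run(new String[] {"create"});
        System.out.println("rc=" + rc + " rc2=" + rc2); // expected: rc=-1 rc2=-1
      }
    }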
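
Similarly, a hedged sketch of the proxy-server check after the HADOOP-10566 refactor. ProxyUsers.refreshSuperUserGroupsConfiguration() now delegates to ProxyServers.refresh(conf), as shown in the ProxyUsers diff above; the isProxyServer lookup and the "hadoop.proxyservers" key are assumed to live on the new ProxyServers class, mirroring the members removed from ProxyUsers and TestProxyUsers, since ProxyServers.java itself is copied unchanged from trunk and not shown here:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.authorize.ProxyServers;

    public class ProxyServersSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Trusted proxy servers (for example HTTP proxies) are listed by host;
        // the resolution logic removed from ProxyUsers stored resolvable hosts
        // by IP address, and the relocated class is assumed to do the same.
        conf.set("hadoop.proxyservers", "2.2.2.2, 3.3.3.3");
        ProxyServers.refresh(conf);
        System.out.println(ProxyServers.isProxyServer("2.2.2.2")); // expected: true
        System.out.println(ProxyServers.isProxyServer("1.1.1.1")); // expected: false
      }
    }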