HADOOP-12491 Avoid unsafe split and append on fields that might be IPv6 literals


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/dd776461
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/dd776461
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/dd776461

Branch: refs/heads/HADOOP-11890
Commit: dd7764611e3ca00c5d34bae5456df032d1c05495
Parents: d89cecf
Author: Elliott Clark <ecl...@apache.org>
Authored: Tue Oct 20 12:40:12 2015 -0700
Committer: Elliott Clark <ecl...@apache.org>
Committed: Fri Oct 23 11:59:34 2015 -0700

----------------------------------------------------------------------
 .../hadoop-common/src/main/conf/hadoop-env.sh   |  3 +-
 .../org/apache/hadoop/conf/Configuration.java   |  2 +-
 .../crypto/key/kms/KMSClientProvider.java       | 13 ++--
 .../main/java/org/apache/hadoop/ipc/Client.java | 13 ++--
 .../main/java/org/apache/hadoop/net/DNS.java    | 64 ++++++++++++++++----
 .../apache/hadoop/net/SocksSocketFactory.java   | 19 +++---
 .../apache/hadoop/ha/ClientBaseWithFixes.java   | 15 +----
 .../java/org/apache/hadoop/net/TestDNS.java     | 17 ++++++
 .../org/apache/hadoop/net/TestNetUtils.java     | 25 ++++++++
 9 files changed, 126 insertions(+), 45 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/dd776461/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
index ae18542..fd454cc 100644
--- a/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
+++ b/hadoop-common-project/hadoop-common/src/main/conf/hadoop-env.sh
@@ -78,8 +78,7 @@
 # memory size.
 # export HADOOP_HEAPSIZE_MIN=
 
-# Extra Java runtime options for all Hadoop commands. We don't support
-# IPv6 yet/still, so by default the preference is set to IPv4.
+# Extra Java runtime options for all Hadoop commands.
 # export HADOOP_OPTS="-Djava.net.preferIPv4Stack=true"
 
 # Some parts of the shell code may do special things dependent upon

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dd776461/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 8801c6c..320028c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -2131,7 +2131,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
       return updateConnectAddr(addressProperty, addr);
     }
 
-    final String connectHost = connectHostPort.split(":")[0];
+    final String connectHost = NetUtils.getHostFromHostPort(connectHostPort);
     // Create connect address using client address hostname and server port.
     return updateConnectAddr(addressProperty, NetUtils.createSocketAddrForHost(
         connectHost, addr.getPort()));
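
The one-line change above is the pattern this whole patch chases: taking the host out of a
"host:port" string with split(":")[0] silently breaks once the host is an IPv6 literal. A
minimal illustrative sketch of the failure mode, using Guava's HostAndPort (the same class
the patch uses elsewhere) as the bracket-aware alternative; the class name is made up:

    import com.google.common.net.HostAndPort;

    public class HostPortSplitSketch {
      public static void main(String[] args) {
        String hostPort = "[2001:db8::1]:8020";
        // Naive split keeps only "[2001" -- not a usable host name.
        String naive = hostPort.split(":")[0];
        // Bracket-aware parse recovers "2001:db8::1" and port 8020.
        HostAndPort hp = HostAndPort.fromString(hostPort);
        System.out.println(naive + " vs " + hp.getHostText() + ":" + hp.getPort());
      }
    }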

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dd776461/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
index 1ffc44d..8c2c85d 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/KMSClientProvider.java
@@ -76,6 +76,7 @@ import org.apache.hadoop.crypto.key.KeyProviderCryptoExtension.CryptoExtension;
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
+import com.google.common.net.HostAndPort;
 
 /**
  * KMS client <code>KeyProvider</code> implementation.
@@ -253,16 +254,20 @@ public class KMSClientProvider extends KeyProvider implements CryptoExtension,
         // In the current scheme, all hosts have to run on the same port
         int port = -1;
         String hostsPart = authority;
+
         if (authority.contains(":")) {
-          String[] t = authority.split(":");
           try {
-            port = Integer.parseInt(t[1]);
-          } catch (Exception e) {
+            HostAndPort hp = HostAndPort.fromString(hostsPart);
+            if (hp.hasPort()) {
+              port = hp.getPort();
+              hostsPart = hp.getHostText();
+            }
+          } catch (IllegalArgumentException e) {
             throw new IOException(
                 "Could not parse port in kms uri [" + origUrl + "]");
           }
-          hostsPart = t[0];
         }
+
         return createProvider(providerUri, conf, origUrl, port, hostsPart);
       }
       return null;
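
For reference, the HostAndPort calls introduced above behave as follows on the kinds of
authority strings a KMS URI can carry (host names and ports below are made-up examples):

    import com.google.common.net.HostAndPort;

    public class KmsAuthorityParseSketch {
      public static void main(String[] args) {
        // Plain host:port -- hasPort() is true, no brackets required.
        HostAndPort plain = HostAndPort.fromString("kms-host.example.com:9600");
        System.out.println(plain.getHostText() + " " + plain.getPort()); // kms-host.example.com 9600

        // Bracketed IPv6 literal -- getHostText() returns the address without brackets.
        HostAndPort v6 = HostAndPort.fromString("[2001:db8::10]:9600");
        System.out.println(v6.getHostText() + " " + v6.getPort());       // 2001:db8::10 9600

        // No port at all -- hasPort() is false, so the provider keeps its default of -1.
        System.out.println(HostAndPort.fromString("kms-host.example.com").hasPort()); // false
      }
    }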

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dd776461/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
index f067d59..885d449 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.ipc;
 
-import static org.apache.hadoop.ipc.RpcConstants.*;
-
 import java.io.BufferedInputStream;
 import java.io.BufferedOutputStream;
 import java.io.ByteArrayOutputStream;
@@ -98,6 +96,8 @@ import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 import com.google.common.util.concurrent.ThreadFactoryBuilder;
 import com.google.protobuf.CodedOutputStream;
+import static org.apache.hadoop.ipc.RpcConstants.CONNECTION_CONTEXT_CALL_ID;
+import static org.apache.hadoop.ipc.RpcConstants.PING_CALL_ID;
 
 /** A client for an IPC service.  IPC calls take a single {@link Writable} as a
  * parameter, and return a {@link Writable} as their value.  A service runs on
@@ -447,7 +447,7 @@ public class Client {
       this.authProtocol = trySasl ? AuthProtocol.SASL : AuthProtocol.NONE;
       
       this.setName("IPC Client (" + socketFactory.hashCode() +") connection to " +
-          server.toString() +
+          NetUtils.getSocketAddressString(server) +
           " from " + ((ticket==null)?"an unknown user":ticket.getUserName()));
       this.setDaemon(true);
     }
@@ -578,8 +578,9 @@ public class Client {
                                server.getHostName(), server.getPort());
 
       if (!server.equals(currentAddr)) {
-        LOG.warn("Address change detected. Old: " + server.toString() +
-                                 " New: " + currentAddr.toString());
+        LOG.warn("Address change detected. Old: " + NetUtils
+            .getSocketAddressString(server) +
+            " New: " + NetUtils.getSocketAddressString(currentAddr));
         server = currentAddr;
         return true;
       }
@@ -1691,7 +1692,7 @@ public class Client {
     
     @Override
     public String toString() {
-      return address.toString();
+      return NetUtils.getSocketAddressString(address);
     }
   }  
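
InetSocketAddress.toString() prints the raw numeric address, so an IPv6 endpoint turns into
an ambiguous run of colons in log output; the patch routes those strings through
NetUtils.getSocketAddressString instead. The sketch below only illustrates what a
bracket-aware formatter of that kind might look like -- it is not the actual NetUtils code
from this branch:

    import java.net.Inet6Address;
    import java.net.InetAddress;
    import java.net.InetSocketAddress;

    public class AddrStringSketch {
      // Hypothetical formatter: bracket IPv6 literals so host and port stay unambiguous.
      static String toHostPortString(InetSocketAddress addr) {
        InetAddress ip = addr.getAddress();
        String host = (ip != null) ? ip.getHostAddress() : addr.getHostString();
        if (ip instanceof Inet6Address) {
          host = "[" + host + "]";
        }
        return host + ":" + addr.getPort();
      }

      public static void main(String[] args) throws Exception {
        InetSocketAddress a = new InetSocketAddress(InetAddress.getByName("::1"), 8020);
        System.out.println(a);                   // e.g. "/0:0:0:0:0:0:0:1:8020"
        System.out.println(toHostPortString(a)); // "[0:0:0:0:0:0:0:1]:8020"
      }
    }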
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dd776461/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
----------------------------------------------------------------------
diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
index a6dc8e3..17845b1 100644
--- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
+++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/DNS.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.net;
 
 import com.google.common.net.InetAddresses;
+import com.google.common.annotations.VisibleForTesting;
 import com.sun.istack.Nullable;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -26,6 +27,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
 import java.net.InetAddress;
+import java.net.Inet6Address;
 import java.net.NetworkInterface;
 import java.net.SocketException;
 import java.net.UnknownHostException;
@@ -74,21 +76,22 @@ public class DNS {
    */
   public static String reverseDns(InetAddress hostIp, @Nullable String ns)
     throws NamingException {
-    //
-    // Builds the reverse IP lookup form
-    // This is formed by reversing the IP numbers and appending in-addr.arpa
-    //
-    String[] parts = hostIp.getHostAddress().split("\\.");
-    String reverseIP = parts[3] + "." + parts[2] + "." + parts[1] + "."
-      + parts[0] + ".in-addr.arpa";
+    String dnsQueryAddress;
+    if (hostIp instanceof Inet6Address) {
+      dnsQueryAddress = getIPv6DnsAddr((Inet6Address)hostIp, ns);
+    } else {
+      dnsQueryAddress = getIPv4DnsAddr(hostIp, ns);
+    }
+    LOG.info("Querying using DNS address: " + dnsQueryAddress);
 
     DirContext ictx = new InitialDirContext();
     Attributes attribute;
     try {
-      attribute = ictx.getAttributes("dns://"               // Use "dns:///" if the default
+      // Use "dns:///" if the default
+      // nameserver is to be used
+      attribute = ictx.getAttributes("dns://"
                          + ((ns == null) ? "" : ns) +
-                         // nameserver is to be used
-                         "/" + reverseIP, new String[] { "PTR" });
+                         "/" + dnsQueryAddress, new String[] { "PTR" });
     } finally {
       ictx.close();
     }
@@ -101,17 +104,52 @@ public class DNS {
     return hostname;
   }
 
+  private static String getIPv4DnsAddr(InetAddress hostIp, @Nullable String ns)
+    throws NamingException {
+    String ipString = hostIp.getHostAddress();
+    LOG.info("Doing reverse DNS lookup for IPv4 address: " + ipString);
+    String[] parts = ipString.split("\\.");
+    if (parts.length != 4) {
+      throw new NamingException("Invalid IPv4 address " + ipString);
+    }
+
+    return parts[3] + "." + parts[2] + "." + parts[1] + "."
+      + parts[0] + ".in-addr.arpa";
+  }
+
+  @VisibleForTesting
+  public static String getIPv6DnsAddr(Inet6Address hostIp, @Nullable String ns)
+    throws NamingException {
+    LOG.info("Doing reverse DNS lookup for IPv6 address: " +
+        hostIp.getHostAddress());
+
+    // bytes need to be converted to hex string and reversed to get IPv6
+    // reverse resolution address
+    byte[] bytes = hostIp.getAddress();
+    StringBuilder sb = new StringBuilder();
+    for(int pos = bytes.length - 1; pos >= 0; pos--) {
+      byte b = bytes[pos];
+      String hexStr = String.format("%02x", b);
+      sb.append(hexStr.charAt(1));
+      sb.append(".");
+      sb.append(hexStr.charAt(0));
+      sb.append(".");
+    }
+    sb.append("ip6.arpa");
+    return sb.toString();
+  }
+
   /**
    * @return NetworkInterface for the given subinterface name (eg eth0:0)
    *    or null if no interface with the given name can be found  
    */
   private static NetworkInterface getSubinterface(String strInterface)
       throws SocketException {
-    Enumeration<NetworkInterface> nifs = 
+    Enumeration<NetworkInterface> nifs =
       NetworkInterface.getNetworkInterfaces();
-      
+
     while (nifs.hasMoreElements()) {
-      Enumeration<NetworkInterface> subNifs = 
+      Enumeration<NetworkInterface> subNifs =
         nifs.nextElement().getSubInterfaces();
 
       while (subNifs.hasMoreElements()) {
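
getIPv6DnsAddr builds the RFC 3596 reverse-lookup name: the 32 hex nibbles of the address,
emitted in reverse order and dot-separated, with "ip6.arpa" appended. A small self-contained
example that mirrors the loop above for ::1 (the class name is illustrative only):

    import java.net.Inet6Address;
    import java.net.InetAddress;

    public class Ip6ArpaExample {
      public static void main(String[] args) throws Exception {
        byte[] bytes = ((Inet6Address) InetAddress.getByName("::1")).getAddress();
        StringBuilder sb = new StringBuilder();
        // Walk the 16 bytes last-to-first; within each byte the low nibble comes first.
        for (int pos = bytes.length - 1; pos >= 0; pos--) {
          String hex = String.format("%02x", bytes[pos]);
          sb.append(hex.charAt(1)).append('.').append(hex.charAt(0)).append('.');
        }
        sb.append("ip6.arpa");
        // Prints 1.0.0.0. ... .0.ip6.arpa -- 32 nibbles in all.
        System.out.println(sb);
      }
    }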

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dd776461/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java
index 6b84f9d..8864104 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/SocksSocketFactory.java
@@ -31,6 +31,8 @@ import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 
+import com.google.common.net.HostAndPort;
+
 /**
  * Specialized SocketFactory to create sockets with a SOCKS proxy
  */
@@ -146,13 +148,16 @@ public class SocksSocketFactory extends SocketFactory implements
    * @param proxyStr the proxy address using the format "host:port"
    */
   private void setProxy(String proxyStr) {
-    String[] strs = proxyStr.split(":", 2);
-    if (strs.length != 2)
-      throw new RuntimeException("Bad SOCKS proxy parameter: " + proxyStr);
-    String host = strs[0];
-    int port = Integer.parseInt(strs[1]);
-    this.proxy =
+    try {
+      HostAndPort hp = HostAndPort.fromString(proxyStr);
+      if (!hp.hasPort())
+        throw new RuntimeException("Bad SOCKS proxy parameter: " + proxyStr);
+      String host = hp.getHostText();
+      this.proxy =
         new Proxy(Proxy.Type.SOCKS, InetSocketAddress.createUnresolved(host,
-            port));
+        hp.getPort()));
+    } catch (IllegalArgumentException e) {
+      throw new RuntimeException("Bad SOCKS proxy parameter: " + proxyStr);
+    }
   }
 }
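
setProxy() now leans on the same Guava parsing as KMSClientProvider above. A short usage
note on how HostAndPort treats the proxy strings this factory is likely to see (example
hosts are made up):

    import com.google.common.net.HostAndPort;

    public class ProxyParseSketch {
      public static void main(String[] args) {
        // Hostname form still works: port 1080 is extracted as before.
        System.out.println(HostAndPort.fromString("proxy.example.com:1080").getPort()); // 1080
        // Bracketed IPv6 literal: host "::1", port 1080.
        System.out.println(HostAndPort.fromString("[::1]:1080").getHostText());         // ::1
        // Missing port: hasPort() is false, which setProxy() maps to a RuntimeException.
        System.out.println(HostAndPort.fromString("proxy.example.com").hasPort());      // false
        // Malformed input (e.g. an unclosed bracket) throws IllegalArgumentException,
        // which setProxy() also rewraps as a RuntimeException.
      }
    }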

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dd776461/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java
index b1ce1d1..3ad4aed 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/ClientBaseWithFixes.java
@@ -31,6 +31,7 @@ import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
 
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.ServerSocketUtil;
 import org.apache.hadoop.util.Time;
 import org.apache.zookeeper.TestableZooKeeper;
@@ -321,22 +322,12 @@ public abstract class ClientBaseWithFixes extends ZKTestCase {
         return tmpDir;
     }
 
-    private static int getPort(String hostPort) {
-        String[] split = hostPort.split(":");
-        String portstr = split[split.length-1];
-        String[] pc = portstr.split("/");
-        if (pc.length > 1) {
-            portstr = pc[0];
-        }
-        return Integer.parseInt(portstr);
-    }
-
     static ServerCnxnFactory createNewServerInstance(File dataDir,
             ServerCnxnFactory factory, String hostPort, int maxCnxns)
         throws IOException, InterruptedException
     {
         ZooKeeperServer zks = new ZooKeeperServer(dataDir, dataDir, 3000);
-        final int PORT = getPort(hostPort);
+        final int PORT = NetUtils.getPortFromHostPort(hostPort);
         if (factory == null) {
             factory = ServerCnxnFactory.createFactory(PORT, maxCnxns);
         }
@@ -364,7 +355,7 @@ public abstract class ClientBaseWithFixes extends ZKTestCase {
             } catch (IOException ie) {
                 LOG.warn("Error closing logs ", ie);
             }
-            final int PORT = getPort(hostPort);
+            final int PORT = NetUtils.getPortFromHostPort(hostPort);
 
             Assert.assertTrue("waiting for server down",
                        ClientBaseWithFixes.waitForServerDown("127.0.0.1:" + PORT,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dd776461/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java
index b26c7ca..bb82abf 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java
@@ -24,6 +24,7 @@ import java.net.NetworkInterface;
 import java.net.SocketException;
 import java.net.UnknownHostException;
 import java.net.InetAddress;
+import java.net.Inet6Address;
 
 import javax.naming.CommunicationException;
 import javax.naming.NameNotFoundException;
@@ -261,4 +262,20 @@ public class TestDNS {
     assertNotNull("localhost is null", localhost);
     LOG.info("Localhost IPAddr is " + localhost.toString());
   }
+
+  /**
+   * Test that dns query address is calculated correctly for ipv6 addresses
+   */
+  @Test
+  public void testIPv6ReverseDNSAddress() throws Exception {
+    Inet6Address adr = (Inet6Address)InetAddress.getByName("::");
+    assertEquals(
+        "0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.ip6.arpa",
+        DNS.getIPv6DnsAddr(adr, null));
+
+    adr = (Inet6Address)InetAddress.getByName("fe80::62eb:69ff:fe9b:bade");
+    assertEquals(
+        "e.d.a.b.b.9.e.f.f.f.9.6.b.e.2.6.0.0.0.0.0.0.0.0.0.0.0.0.0.8.e.f.ip6.arpa",
+        DNS.getIPv6DnsAddr(adr, null));
+  }
 }

http://git-wip-us.apache.org/repos/asf/hadoop/blob/dd776461/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
index 55937a6..ddb1f83 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
@@ -57,6 +57,11 @@ public class TestNetUtils {
   private static final String DEST_PORT_NAME = Integer.toString(DEST_PORT);
   private static final int LOCAL_PORT = 8080;
   private static final String LOCAL_PORT_NAME = Integer.toString(LOCAL_PORT);
+  private final String IPV6_LOOPBACK_LONG_STRING = "0:0:0:0:0:0:0:1";
+  private final String IPV6_SAMPLE_ADDRESS = "2a03:2880:2130:cf05:face:b00c:0:1";
+  private final String IPV6_LOOPBACK_SHORT_STRING = "::1";
+  private final String IPV6_LOOPBACK_WITH_PORT = "["+IPV6_LOOPBACK_LONG_STRING+"]:10";
+  private final String IPV6_SAMPLE_WITH_PORT = "[" + IPV6_SAMPLE_ADDRESS + "]:10";
 
   /**
    * Some slop around expected times when making sure timeouts behave
@@ -658,6 +663,15 @@ public class TestNetUtils {
   }
 
   @Test
+  public void testGetHostNameOfIPworksWithIPv6() {
+    assertNotNull(NetUtils.getHostNameOfIP(IPV6_LOOPBACK_LONG_STRING));
+    assertNotNull(NetUtils.getHostNameOfIP(IPV6_LOOPBACK_SHORT_STRING));
+    assertNotNull(NetUtils.getHostNameOfIP(IPV6_SAMPLE_ADDRESS));
+    assertNotNull(NetUtils.getHostNameOfIP(IPV6_SAMPLE_WITH_PORT));
+    assertNotNull(NetUtils.getHostNameOfIP(IPV6_LOOPBACK_WITH_PORT));
+  }
+
+  @Test
   public void testTrimCreateSocketAddress() {
     Configuration conf = new Configuration();
     NetUtils.addStaticResolution("host", "127.0.0.1");
@@ -668,6 +682,17 @@ public class TestNetUtils {
     assertEquals(defaultAddr.trim(), NetUtils.getHostPortString(addr));
   }
 
+  @Test
+  public void testTrimCreateSocketAddressIPv6() {
+    Configuration conf = new Configuration();
+    NetUtils.addStaticResolution("hostIPv6", IPV6_LOOPBACK_LONG_STRING);
+    final String defaultAddr = "hostIPv6:1  ";
+
+    InetSocketAddress addr = NetUtils.createSocketAddr(defaultAddr);
+    conf.setSocketAddr("myAddress", addr);
+    assertEquals(defaultAddr.trim(), NetUtils.getHostPortString(addr));
+  }
+
   private <T> void assertBetterArrayEquals(T[] expect, T[]got) {
     String expectStr = StringUtils.join(expect, ", ");
     String gotStr = StringUtils.join(got, ", ");
