HDFS-8078 Fix HDFS client gets errors trying to connect to IPv6 DataNode

    Signed-off-by: Nate Edel <n...@fb.com>


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/af980206
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/af980206
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/af980206

Branch: refs/heads/HADOOP-11890
Commit: af9802065dadc25787b77a8634b49ca6a9964783
Parents: e6347c9
Author: Nate Edel <n...@fb.com>
Authored: Wed Sep 16 17:39:16 2015 -0700
Committer: Elliott Clark <ecl...@apache.org>
Committed: Fri Oct 23 11:55:11 2015 -0700

----------------------------------------------------------------------
 .../java/org/apache/hadoop/net/NetUtils.java    | 180 ++++++++++++++----
 .../org/apache/hadoop/net/TestNetUtils.java     |   8 +-
 .../apache/hadoop/hdfs/protocol/DatanodeID.java |  19 +-
 .../datatransfer/sasl/DataTransferSaslUtil.java |   8 +-
 .../hdfs/util/TestIPv6FormatCompatibility.java  | 187 +++++++++++++++++++
 5 files changed, 351 insertions(+), 51 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/af980206/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
index e475149..efb772d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
@@ -17,33 +17,9 @@
  */
 package org.apache.hadoop.net;
 
-import java.io.EOFException;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
-import java.lang.reflect.Constructor;
-import java.net.BindException;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.NetworkInterface;
-import java.net.NoRouteToHostException;
-import java.net.ServerSocket;
-import java.net.Socket;
-import java.net.SocketAddress;
-import java.net.SocketException;
-import java.net.SocketTimeoutException;
-import java.net.URI;
-import java.net.URISyntaxException;
-import java.net.UnknownHostException;
-import java.net.ConnectException;
-import java.nio.channels.SocketChannel;
-import java.util.Map.Entry;
-import java.util.regex.Pattern;
-import java.util.*;
-import java.util.concurrent.ConcurrentHashMap;
-
-import javax.net.SocketFactory;
-
+import com.google.common.base.Preconditions;
+import com.google.common.net.HostAndPort;
+import com.google.common.net.InetAddresses;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.net.util.SubnetUtils;
@@ -56,15 +32,26 @@ import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.ipc.VersionedProtocol;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.util.ReflectionUtils;
+import org.apache.http.conn.util.InetAddressUtils;
 
-import com.google.common.base.Preconditions;
+import javax.net.SocketFactory;
+import java.io.EOFException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.lang.reflect.Constructor;
+import java.net.*;
+import java.nio.channels.SocketChannel;
+import java.util.*;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentHashMap;
 
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Unstable
 public class NetUtils {
   private static final Log LOG = LogFactory.getLog(NetUtils.class);
   
-  private static Map<String, String> hostToResolved = 
+  private static Map<String, String> hostToResolved =
                                      new HashMap<String, String>();
   /** text to point users elsewhere: {@value} */
   private static final String FOR_MORE_DETAILS_SEE
@@ -611,9 +598,6 @@ public class NetUtils {
     }
   }
 
-  private static final Pattern ipPortPattern = // Pattern for matching ip[:port]
-    Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}(:\\d+)?");
-  
   /**
    * Attempt to obtain the host name of the given string which contains
    * an IP address and an optional port.
@@ -622,16 +606,25 @@ public class NetUtils {
    * @return Host name or null if the name can not be determined
    */
   public static String getHostNameOfIP(String ipPort) {
-    if (null == ipPort || !ipPortPattern.matcher(ipPort).matches()) {
+    String ip = null;
+    if (null == ipPort || ipPort.isEmpty()) {
+      return null;
+    }
+    try {
+      HostAndPort hostAndPort = HostAndPort.fromString(ipPort);
+      ip = hostAndPort.getHostText();
+      if (!InetAddresses.isInetAddress(ip)) {
+        return null;
+      }
+    } catch (IllegalArgumentException e) {
+      LOG.debug("getHostNameOfIP: '" + ipPort
+          + "' is not a valid IP address or IP/Port pair.", e);
       return null;
     }
-    
     try {
-      int colonIdx = ipPort.indexOf(':');
-      String ip = (-1 == colonIdx) ? ipPort
-          : ipPort.substring(0, ipPort.indexOf(':'));
       return InetAddress.getByName(ip).getHostName();
     } catch (UnknownHostException e) {
+      LOG.trace("getHostNameOfIP: '"+ipPort+"' name not resolved.", e);
       return null;
     }
   }
@@ -647,11 +640,88 @@ public class NetUtils {
   
   /**
    * Compose a "host:port" string from the address.
+   *
+   * Note that this preferentially returns the host name if available; if the
+   * IP address is desired, use getIPPortString(); if both are desired as in
+   * InetSocketAddress.toString, use getSocketAddressString()
    */
   public static String getHostPortString(InetSocketAddress addr) {
-    return addr.getHostName() + ":" + addr.getPort();
+    String hostName = addr.getHostName();
+    if (InetAddressUtils.isIPv6Address(hostName)) {
+      return "[" + hostName + "]:" + addr.getPort();
+    }
+    return hostName + ":" + addr.getPort();
   }
-  
+
+  /**
+   * Compose an "ip:port" string from the InetSocketAddress.
+   *
+   * Note that this throws an IllegalArgumentException if passed an unresolved
+   * InetSocketAddress.
+   */
+  public static String getIPPortString(InetSocketAddress addr) {
+    final InetAddress ip = addr.getAddress();
+    // This is a judgement call; we could arguably just guard against the NPE
+    // by treating null as "", but I think that would hide more bugs than it
+    // prevents.
+    if (ip == null) {
+      throw new IllegalArgumentException(
+          "getIPPortString called with unresolved InetSocketAddress : "
+              + getSocketAddressString(addr));
+    }
+    String ipString = ip.getHostAddress();
+    if (ip instanceof Inet6Address) {
+      return "[" + ipString + "]:" + addr.getPort();
+    }
+    return ipString + ":" + addr.getPort();
+  }
+
+  public static String getIPPortString(String ipAddr, int port) {
+    String s;
+    if (ipAddr != null) {
+      s = ipAddr + ":" + port;
+    } else {
+      s = ":" + port;
+    }
+    // A blank address will eventually be treated as localhost if it gets down
+    // to InetAddress. Tests extensively use a blank address, and we don't want
+    // to change that behavior here.
+    if (ipAddr != null && !ipAddr.isEmpty() && InetAddressUtils.isIPv6Address(ipAddr)) {
+      try {
+        InetAddress addr = InetAddress.getByName(ipAddr);
+        String cleanAddr = addr.getHostAddress();
+        if (addr instanceof Inet6Address) {
+          s = '[' + cleanAddr + ']' + ":" + port;
+        }
+      } catch (UnknownHostException e) {
+        // ignore anything that isn't an IPv6 literal and keep the old
+        // behavior. could add debug log here, but this should only happen
+        // if there's a bug in InetAddressUtils.isIPv6Address which accepts
+        // something that isn't an IPv6 literal.
+      }
+    }
+    return s;
+  }
+
+  /**
+   * An IPv6-safe version of InetSocketAddress.toString().
+   * Note that this will typically be of the form hostname/IP:port and is NOT
+   * a substitute for getHostPortString or getIPPortString.
+   */
+  public static String getSocketAddressString(InetSocketAddress addr) {
+    if (addr.isUnresolved()) {
+      return addr.toString();
+    }
+    InetAddress ip = addr.getAddress();
+    if (ip instanceof Inet6Address) {
+      String hostName = addr.getHostName();
+      return ((hostName != null) ? hostName : "")
+          + "/[" + ip.getHostAddress() + "]:" + addr.getPort();
+    } else {
+      return addr.toString();
+    }
+  }
+
   /**
    * Checks if {@code host} is a local host name and return {@link InetAddress}
    * corresponding to that address.
@@ -913,4 +983,36 @@ public class NetUtils {
     }
     return port;
   }
+
+  /**
+   * Wrapper method on HostAndPort; returns the port from a host:port
+   * or IP:port pair.
+   *
+   * It's probably best to create your own HostAndPort.fromString(hp) and
+   * do a .getPort and .getHostText if you need both host and port in one
+   * scope.
+   */
+  public static int getPortFromHostPort(String hp) {
+    return HostAndPort.fromString(hp).getPort();
+  }
+
+  /**
+   * Wrapper method on HostAndPort; returns the host from a host:port
+   * or IP:port pair.
+   *
+   * It's probably best to create your own HostAndPort.fromString(hp) and
+   * do a .getPort and .getHostText if you need both host and port in one
+   * scope.
+   */
+  public static String getHostFromHostPort(String hp) {
+    return HostAndPort.fromString(hp).getHostText();
+  }
+
+  public static InetAddress getInetAddressFromInetSocketAddressString(
+      String remoteAddr) {
+    int slashIdx = remoteAddr.indexOf('/') + 1;
+    int colonIdx = remoteAddr.lastIndexOf(':');
+    String ipOnly = remoteAddr.substring(slashIdx, colonIdx);
+    return InetAddresses.forString(ipOnly);
+  }
 }

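For reference, a minimal sketch (not part of the patch) of how the new NetUtils helpers are expected to format addresses once this change is applied. The host names shown in the comments depend on the local resolver and are illustrative assumptions, not guaranteed output:

import java.net.InetSocketAddress;

import org.apache.hadoop.net.NetUtils;

public class HostPortFormattingSketch {
  public static void main(String[] args) {
    InetSocketAddress v4 = new InetSocketAddress("127.0.0.1", 8020);
    InetSocketAddress v6 = new InetSocketAddress("::1", 8020);

    // IPv4 keeps the familiar host:port / ip:port forms.
    System.out.println(NetUtils.getHostPortString(v4));      // e.g. localhost:8020
    System.out.println(NetUtils.getIPPortString(v4));        // 127.0.0.1:8020

    // IPv6 literals get RFC 2732 brackets so the port separator stays
    // unambiguous even though the literal itself contains colons.
    System.out.println(NetUtils.getIPPortString(v6));        // [0:0:0:0:0:0:0:1]:8020
    System.out.println(NetUtils.getSocketAddressString(v6)); // e.g. localhost/[0:0:0:0:0:0:0:1]:8020
  }
}
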
http://git-wip-us.apache.org/repos/asf/hadoop/blob/af980206/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
index c93ede8..55937a6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
@@ -622,8 +622,10 @@ public class TestNetUtils {
     } catch (UnknownHostException e) {
       Assume.assumeTrue("Network not resolving "+ oneHost, false);
     }
-    List<String> hosts = Arrays.asList("127.0.0.1",
-        "localhost", oneHost, "UnknownHost123");
+
+    List<String> hosts = Arrays.asList(new String[] {"127.0.0.1",
+        "localhost", oneHost, "UnknownHost123.invalid"});
+
     List<String> normalizedHosts = NetUtils.normalizeHostNames(hosts);
     String summary = "original [" + StringUtils.join(hosts, ", ") + "]"
         + " normalized [" + StringUtils.join(normalizedHosts, ", ") + "]";
@@ -649,6 +651,8 @@ public class TestNetUtils {
     assertNull(NetUtils.getHostNameOfIP("127.0.0.1:"));   // no port
     assertNull(NetUtils.getHostNameOfIP("127.0.0.1:-1")); // bogus port
     assertNull(NetUtils.getHostNameOfIP("127.0.0.1:A"));  // bogus port
+    assertNotNull(NetUtils.getHostNameOfIP("[::1]"));
+    assertNotNull(NetUtils.getHostNameOfIP("[::1]:1"));
     assertNotNull(NetUtils.getHostNameOfIP("127.0.0.1"));
     assertNotNull(NetUtils.getHostNameOfIP("127.0.0.1:1"));
   }

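The new assertions above exercise getHostNameOfIP() with bracketed IPv6 input. A small sketch of the parsing it now relies on instead of the old IPv4-only regex, using the same Guava HostAndPort/InetAddresses calls the patch itself uses (getHostText() matches the Guava version of that era; later releases rename it to getHost()):

import com.google.common.net.HostAndPort;
import com.google.common.net.InetAddresses;

public class HostAndPortParsingSketch {
  public static void main(String[] args) {
    // HostAndPort strips the brackets from an IPv6 literal and splits off the port.
    HostAndPort hp = HostAndPort.fromString("[::1]:1");
    System.out.println(hp.getHostText());                              // ::1
    System.out.println(hp.getPort());                                  // 1
    System.out.println(InetAddresses.isInetAddress(hp.getHostText())); // true

    // A host name is not an IP literal, so getHostNameOfIP() returns null for it.
    System.out.println(InetAddresses.isInetAddress("example.com"));    // false
  }
}
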
http://git-wip-us.apache.org/repos/asf/hadoop/blob/af980206/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
index 5fd845d..9ac2f46 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
@@ -22,6 +22,13 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
 import com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.net.NetUtils;
+import sun.rmi.runtime.Log;
+
+import java.net.Inet6Address;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.UnknownHostException;
 
 /**
  * This class represents the primary identifier for a Datanode.
@@ -96,15 +103,16 @@ public class DatanodeID implements Comparable<DatanodeID> {
   }
 
   public void setIpAddr(String ipAddr) {
+    this.ipAddr = ipAddr;
     //updated during registration, preserve former xferPort
-    setIpAndXferPort(ipAddr, xferPort);
+    setIpAndXferPort(this.ipAddr, xferPort);
   }
 
   private void setIpAndXferPort(String ipAddr, int xferPort) {
     // build xferAddr string to reduce cost of frequent use
     this.ipAddr = ipAddr;
     this.xferPort = xferPort;
-    this.xferAddr = ipAddr + ":" + xferPort;
+    this.xferAddr = NetUtils.getIPPortString(ipAddr, xferPort);
   }
 
   public void setPeerHostName(String peerHostName) {
@@ -158,21 +166,21 @@ public class DatanodeID implements Comparable<DatanodeID> {
    * @return IP:ipcPort string
    */
   private String getIpcAddr() {
-    return ipAddr + ":" + ipcPort;
+    return NetUtils.getIPPortString(ipAddr, ipcPort);
   }
 
   /**
    * @return IP:infoPort string
    */
   public String getInfoAddr() {
-    return ipAddr + ":" + infoPort;
+    return NetUtils.getIPPortString(ipAddr, infoPort);
   }
 
   /**
    * @return IP:infoPort string
    */
   public String getInfoSecureAddr() {
-    return ipAddr + ":" + infoSecurePort;
+    return NetUtils.getIPPortString(ipAddr, infoSecurePort);
   }
 
   /**
@@ -256,6 +264,7 @@ public class DatanodeID implements Comparable<DatanodeID> {
    * Note that this does not update storageID.
    */
   public void updateRegInfo(DatanodeID nodeReg) {
+    ipAddr = nodeReg.getIpAddr();
     setIpAndXferPort(nodeReg.getIpAddr(), nodeReg.getXferPort());
     hostName = nodeReg.getHostName();
     peerHostName = nodeReg.getPeerHostName();

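DatanodeID now builds its xferAddr/ipcAddr/infoAddr strings through NetUtils.getIPPortString, which is what gives IPv6 datanodes bracketed, normalized addresses. A minimal sketch (port numbers chosen for illustration) of the three cases the new test below relies on:

import org.apache.hadoop.net.NetUtils;

public class DatanodeAddrSketch {
  public static void main(String[] args) {
    // IPv4: unchanged "ip:port" form.
    System.out.println(NetUtils.getIPPortString("127.0.0.1", 50010)); // 127.0.0.1:50010

    // IPv6: resolved, normalized to the long form, and bracketed, matching
    // what TestIPv6FormatCompatibility expects from getXferAddr(false).
    System.out.println(NetUtils.getIPPortString("::1", 50010));       // [0:0:0:0:0:0:0:1]:50010

    // Blank address: keeps the old ":port" behavior that existing tests rely on.
    System.out.println(NetUtils.getIPPortString("", 50010));          // :50010
  }
}
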
http://git-wip-us.apache.org/repos/asf/hadoop/blob/af980206/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java
index 5e07550..b2c7157 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/DataTransferSaslUtil.java
@@ -51,6 +51,7 @@ import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.DataTransferEncr
 import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.DataTransferEncryptorMessageProto.DataTransferEncryptorStatus;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CipherOptionProto;
 import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.SaslPropertiesResolver;
 import org.apache.hadoop.security.SaslRpcServer.QualityOfProtection;
 import org.slf4j.Logger;
@@ -156,11 +157,8 @@ public final class DataTransferSaslUtil {
    * @return InetAddress from peer
    */
   public static InetAddress getPeerAddress(Peer peer) {
-    String remoteAddr = peer.getRemoteAddressString().split(":")[0];
-    int slashIdx = remoteAddr.indexOf('/');
-    return InetAddresses.forString(slashIdx != -1 ?
-        remoteAddr.substring(slashIdx + 1, remoteAddr.length()) :
-        remoteAddr);
+    String remoteAddr = peer.getRemoteAddressString();
+    return NetUtils.getInetAddressFromInetSocketAddressString(remoteAddr);
   }
 
   /**

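The old getPeerAddress() split the remote address string on the first ':', which cannot work once the address part is an IPv6 literal full of colons. The new NetUtils helper instead takes everything between the '/' and the last ':'. A short sketch (the literal and port are illustrative only):

import java.net.InetAddress;

import org.apache.hadoop.net.NetUtils;

public class PeerAddressSketch {
  public static void main(String[] args) {
    // InetSocketAddress.toString() yields "hostname/ip:port"; only the last ':'
    // separates the port when the ip part is an IPv6 literal.
    String remote = "/0:0:0:0:0:0:0:1:50010";
    InetAddress addr = NetUtils.getInetAddressFromInetSocketAddressString(remote);
    System.out.println(addr.getHostAddress()); // 0:0:0:0:0:0:0:1
  }
}
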
http://git-wip-us.apache.org/repos/asf/hadoop/blob/af980206/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestIPv6FormatCompatibility.java
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestIPv6FormatCompatibility.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestIPv6FormatCompatibility.java
new file mode 100644
index 0000000..ca474e5
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestIPv6FormatCompatibility.java
@@ -0,0 +1,187 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.util;
+
+import com.google.common.net.InetAddresses;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hdfs.net.Peer;
+import org.apache.hadoop.hdfs.protocol.DatanodeID;
+import org.apache.hadoop.hdfs.protocol.datatransfer.sasl.DataTransferSaslUtil;
+import org.apache.hadoop.net.unix.DomainSocket;
+import org.junit.Test;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.net.Inet4Address;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.net.SocketAddress;
+import java.nio.channels.ReadableByteChannel;
+
+import static org.junit.Assert.*;
+
+/**
+ * A very basic, very fast test that exercises IPv6 parsing issues as we find
+ * them. It does NOT depend on having a working IPv6 stack and should succeed
+ * even if run with "-Djava.net.preferIPv4Stack=true".
+ */
+public class TestIPv6FormatCompatibility {
+  private final String IPV6_LOOPBACK_LONG_STRING = "0:0:0:0:0:0:0:1";
+  private final String IPV6_SAMPLE_ADDRESS = "2a03:2880:2130:cf05:face:b00c:0:1";
+  private final String IPV6_LOOPBACK_SHORT_STRING = "::1";
+  private final String IPV4_LOOPBACK_WITH_PORT = "127.0.0.1:10";
+  private final String IPV6_LOOPBACK_WITH_PORT = "["+IPV6_LOOPBACK_LONG_STRING+"]:10";
+  private final String IPV6_SAMPLE_WITH_PORT = "[" + IPV6_SAMPLE_ADDRESS + "]:10";
+  private final InetAddress IPV6LOOPBACK = InetAddresses.forString(IPV6_LOOPBACK_LONG_STRING);
+  private final InetAddress IPV4LOOPBACK = Inet4Address.getLoopbackAddress();
+  private final InetAddress IPV6SAMPLE = InetAddresses.forString(IPV6_SAMPLE_ADDRESS);
+  private final String IPV4_LOOPBACK_STRING = IPV4LOOPBACK.getHostAddress();
+
+  private static final Log LOG = LogFactory.getLog(TestIPv6FormatCompatibility.class);
+
+  // HDFS-8078 : note that we're expecting URI-style (see Javadoc for java.net.URI or rfc2732)
+  @Test
+  public void testDatanodeIDXferAddressAddsBrackets() {
+    DatanodeID ipv4localhost =
+        new DatanodeID(IPV4_LOOPBACK_STRING, "localhost", "no-uuid", 10, 20, 30, 40);
+    DatanodeID ipv6localhost =
+        new DatanodeID(IPV6_LOOPBACK_LONG_STRING, "localhost", "no-uuid", 10, 20, 30, 40);
+    DatanodeID ipv6sample =
+        new DatanodeID(IPV6_SAMPLE_ADDRESS, "ipv6.example.com", "no-uuid", 10, 20, 30, 40);
+    assertEquals("IPv6 should have brackets added",
+        IPV6_LOOPBACK_WITH_PORT, ipv6localhost.getXferAddr(false));
+    assertEquals("IPv6 should have brackets added",
+        IPV6_SAMPLE_WITH_PORT, ipv6sample.getXferAddr(false));
+    assertEquals("IPv4 should not have brackets added",
+        IPV4_LOOPBACK_WITH_PORT, ipv4localhost.getXferAddr(false));
+  }
+
+  // HDFS-8078
+  @Test
+  public void testDatanodeIDXferAddressShouldNormalizeIPv6() {
+    DatanodeID ipv6short =
+        new DatanodeID(IPV6_LOOPBACK_SHORT_STRING, "localhost", "no-uuid", 10, 20, 30, 40);
+    assertEquals("IPv6 should be normalized and not abbreviated",
+        IPV6_LOOPBACK_WITH_PORT, ipv6short.getXferAddr(false));
+  }
+
+  // HDFS-8078 : note that in some cases we're parsing the results of
+  // java.net.SocketAddress.toString(), which doesn't produce URI-style results,
+  // and we're splitting this rather than producing the combined string to be
+  // consumed.
+  @Test
+  public void testGetPeerShouldFindFullIPAddress() {
+    Peer ipv6SamplePeer = new MockInetPeer(IPV6SAMPLE, false);
+    Peer ipv4loopback = new MockInetPeer(IPV4LOOPBACK, false);
+    Peer ipv6loopback = new MockInetPeer(IPV6LOOPBACK, false);
+    assertNotNull(DataTransferSaslUtil.getPeerAddress(ipv6SamplePeer));
+    assertNotNull(DataTransferSaslUtil.getPeerAddress(ipv6loopback));
+    assertNotNull(DataTransferSaslUtil.getPeerAddress(ipv4loopback));
+  }
+
+  // HDFS-8078 : It looks like in some cases this could also produce URI-style
+  // results, so we test both.
+  @Test
+  public void testGetPeerAccept() {
+    Peer ipv6loopbackAsURI = new MockInetPeer(IPV6LOOPBACK, true);
+    assertEquals("getPeer should still work with URI-style [brackets]",
+        IPV6_LOOPBACK_LONG_STRING,
+        DataTransferSaslUtil.getPeerAddress(ipv6loopbackAsURI).getHostAddress());
+  }
+
+  /**
+   * Mocks a Peer purely to test DataTransferSaslUtil.getPeerAddress(), which
+   * takes a Peer and consumes getRemoteAddressString(). All other functionality
+   * is missing.
+   */
+  private class MockInetPeer implements Peer {
+    SocketAddress sa;
+    boolean asURI;
+
+    public MockInetPeer(InetAddress addr, boolean asURI) {
+      sa = new InetSocketAddress(addr, 50010);
+      this.asURI = asURI;
+    }
+
+    @Override
+    public ReadableByteChannel getInputStreamChannel() {
+      return null;
+    }
+
+    @Override
+    public void setReadTimeout(int timeoutMs) throws IOException {
+    }
+
+    @Override
+    public int getReceiveBufferSize() throws IOException {
+      return 0;
+    }
+
+    @Override
+    public boolean getTcpNoDelay() throws IOException {
+      return false;
+    }
+
+    @Override
+    public void setWriteTimeout(int timeoutMs) throws IOException {
+
+    }
+
+    @Override
+    public boolean isClosed() {
+      return false;
+    }
+
+    @Override
+    public void close() throws IOException {
+    }
+
+    @Override
+    public String getRemoteAddressString() {
+      return sa.toString();
+    }
+
+    @Override
+    public String getLocalAddressString() {
+      return null;
+    }
+
+    @Override
+    public InputStream getInputStream() throws IOException {
+      return null;
+    }
+
+    @Override
+    public OutputStream getOutputStream() throws IOException {
+      return null;
+    }
+
+    @Override
+    public boolean isLocal() {
+      return false;
+    }
+
+    @Override
+    public DomainSocket getDomainSocket() {
+      return null;
+    }
+
+    @Override
+    public boolean hasSecureChannel() {
+      return false;
+    }
+  }
+}
\ No newline at end of file
