This is an automated email from the ASF dual-hosted git repository.

brahma pushed a commit to branch HADOOP-17800
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/HADOOP-17800 by this push:
     new 2c9b22f  MAPREDUCE-6519. Avoid unsafe split and append on fields that 
might be IPv6 literals. Contributed by Nemanja Matkovic and Hemanth Boyina.
2c9b22f is described below

commit 2c9b22f15cdb8b9498a82846d3b4a48d93ef7d17
Author: Brahma Reddy Battula <bra...@apache.org>
AuthorDate: Sat Jul 31 22:09:26 2021 +0530

    MAPREDUCE-6519. Avoid unsafe split and append on fields that might be IPv6 
literals. Contributed by Nemanja Matkovic and Hemanth Boyina.
---
 .../org/apache/hadoop/mapred/FileInputFormat.java   | 21 +++++++++++----------
 .../org/apache/hadoop/mapreduce/util/HostUtil.java  |  6 ++----
 .../mapreduce/v2/hs/HistoryClientService.java       | 11 +++++++----
 .../apache/hadoop/ipc/TestMRCJCSocketFactory.java   |  8 +++++---
 .../org/apache/hadoop/mapred/ReliabilityTest.java   | 12 ++----------
 .../apache/hadoop/mapred/TestClientRedirect.java    |  8 +++++---
 .../org/apache/hadoop/mapred/UtilsForTests.java     | 16 ----------------
 .../hadoop/mapreduce/MiniHadoopClusterManager.java  |  5 +++--
 8 files changed, 35 insertions(+), 52 deletions(-)

diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
index 91151f0..d18a722 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/FileInputFormat.java
@@ -43,6 +43,7 @@ import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.mapreduce.security.TokenCache;
 import org.apache.hadoop.net.NetworkTopology;
 import org.apache.hadoop.net.Node;
+import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.NodeBase;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.util.StopWatch;
@@ -712,19 +713,19 @@ public abstract class FileInputFormat<K, V> implements 
InputFormat<K, V> {
   
   private String[] identifyHosts(int replicationFactor, 
                                  Map<Node,NodeInfo> racksMap) {
-    
+
     String [] retVal = new String[replicationFactor];
-   
-    List <NodeInfo> rackList = new LinkedList<NodeInfo>(); 
+
+    List <NodeInfo> rackList = new LinkedList<NodeInfo>();
 
     rackList.addAll(racksMap.values());
-    
+
     // Sort the racks based on their contribution to this split
     sortInDescendingOrder(rackList);
     
     boolean done = false;
     int index = 0;
-    
+
     // Get the host list for all our aggregated items, sort
     // them and return the top entries
     for (NodeInfo ni: rackList) {
@@ -733,27 +734,27 @@ public abstract class FileInputFormat<K, V> implements 
InputFormat<K, V> {
 
       List<NodeInfo>hostList = new LinkedList<NodeInfo>();
       hostList.addAll(hostSet);
-    
+
       // Sort the hosts in this rack based on their contribution
       sortInDescendingOrder(hostList);
 
       for (NodeInfo host: hostList) {
         // Strip out the port number from the host name
-        retVal[index++] = host.node.getName().split(":")[0];
+        retVal[index++] = NetUtils.getHostFromHostPort(host.node.getName());
         if (index == replicationFactor) {
           done = true;
           break;
         }
       }
-      
+
       if (done == true) {
         break;
       }
     }
     return retVal;
   }
-  
-  private String[] fakeRacks(BlockLocation[] blkLocations, int index) 
+
+  private String[] fakeRacks(BlockLocation[] blkLocations, int index)
   throws IOException {
     String[] allHosts = blkLocations[index].getHosts();
     String[] allTopos = new String[allHosts.length];
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/HostUtil.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/HostUtil.java
index ad279ee..1ba4387 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/HostUtil.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/HostUtil.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.mapreduce.util;
 
 import org.apache.hadoop.classification.InterfaceAudience.Private;
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
+import org.apache.hadoop.net.NetUtils;
 
 @Private
 @Unstable
@@ -56,10 +57,7 @@ public class HostUtil {
   public static String convertTrackerNameToHostName(String trackerName) {
     // Ugly!
     // Convert the trackerName to its host name
-    int indexOfColon = trackerName.indexOf(":");
-    String trackerHostName = (indexOfColon == -1) ? 
-      trackerName : 
-      trackerName.substring(0, indexOfColon);
+    String trackerHostName = NetUtils.getHostFromHostPort(trackerName);
     return trackerHostName.substring("tracker_".length());
   }
 
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
index 40faf06..c6f5b53 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
@@ -91,6 +91,7 @@ import org.apache.hadoop.yarn.webapp.WebApp;
 import org.apache.hadoop.yarn.webapp.WebApps;
 
 import 
org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
+import com.google.common.net.HostAndPort;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -173,10 +174,12 @@ public class HistoryClientService extends AbstractService 
{
         .withXFSProtection(JHAdminConfig.MR_HISTORY_XFS_PREFIX)
         .withAppClientProtocol(appClientProtocol)
         .at(NetUtils.getHostPortString(bindAddress)).start(webApp);
-    
-    String connectHost = 
MRWebAppUtil.getJHSWebappURLWithoutScheme(conf).split(":")[0];
-    MRWebAppUtil.setJHSWebappURLWithoutScheme(conf,
-        connectHost + ":" + webApp.getListenerAddress().getPort());
+
+    String connectHost = MRWebAppUtil.getJHSWebappURLWithoutScheme(conf);
+
+    MRWebAppUtil.setJHSWebappURLWithoutScheme(conf, HostAndPort
+        .fromParts(HostAndPort.fromString(connectHost).getHost(),
+            webApp.getListenerAddress().getPort()).toString());
   }
 
   @Override
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java
index 123947a..e89b62f 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestMRCJCSocketFactory.java
@@ -37,6 +37,8 @@ import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.junit.Assert;
 import org.junit.Test;
 
+import com.google.common.net.HostAndPort;
+
 /**
  * This class checks that RPCs can use specialized socket factories.
  */
@@ -89,9 +91,9 @@ public class TestMRCJCSocketFactory {
                 "org.apache.hadoop.ipc.DummySocketFactory");
       jconf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
       String rmAddress = jconf.get(YarnConfiguration.RM_ADDRESS);
-      String[] split = rmAddress.split(":");
-      jconf.set(YarnConfiguration.RM_ADDRESS, split[0] + ':'
-          + (Integer.parseInt(split[1]) + 10));
+      HostAndPort hp = HostAndPort.fromString(rmAddress);
+      jconf.set("yarn.resourcemanager.address",
+          hp.getHost() + ':' + (hp.getPort() + 10));
       client = new JobClient(jconf);
 
       JobStatus[] jobs = client.jobsToComplete();
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
index 303857b..1f53743 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ReliabilityTest.java
@@ -33,6 +33,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.util.HostUtil;
 import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
 import org.apache.hadoop.util.GenericOptionsParser;
 import org.apache.hadoop.util.Shell;
@@ -342,7 +343,7 @@ public class ReliabilityTest extends Configured implements 
Tool {
       LOG.info(new Date() + " Stopping a few trackers");
 
       for (String tracker : trackerNamesList) {
-        String host = convertTrackerNameToHostName(tracker);
+        String host = HostUtil.convertTrackerNameToHostName(tracker);
         LOG.info(new Date() + " Marking tracker on host: " + host);
         fos.write((host + "\n").getBytes());
         if (count++ >= trackerNamesList.size()/2) {
@@ -381,15 +382,6 @@ public class ReliabilityTest extends Configured implements 
Tool {
       }
     }
 
-    private String convertTrackerNameToHostName(String trackerName) {
-      // Convert the trackerName to it's host name
-      int indexOfColon = trackerName.indexOf(":");
-      String trackerHostName = (indexOfColon == -1) ?
-          trackerName :
-            trackerName.substring(0, indexOfColon);
-      return trackerHostName.substring("tracker_".length());
-    }
-
   }
 
   private class KillTaskThread extends Thread {
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
index 5972f65..d948146 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
@@ -151,6 +151,8 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.net.HostAndPort;
+
 public class TestClientRedirect {
 
   static {
@@ -325,9 +327,9 @@ public class TestClientRedirect {
         application.setYarnApplicationState(YarnApplicationState.FINISHED);
         
application.setFinalApplicationStatus(FinalApplicationStatus.SUCCEEDED);
       }
-      String[] split = AMHOSTADDRESS.split(":");
-      application.setHost(split[0]);
-      application.setRpcPort(Integer.parseInt(split[1]));
+      HostAndPort hp = HostAndPort.fromString(AMHOSTADDRESS);
+      application.setHost(hp.getHost());
+      application.setRpcPort(hp.getPort());
       application.setUser("TestClientRedirect-user");
       application.setName("N/A");
       application.setQueue("N/A");
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
index 4a7c328..a52cf08 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/UtilsForTests.java
@@ -834,20 +834,4 @@ public class UtilsForTests {
     file.close();
     return file;
   }
-
-  /**
-   * This formats the long tasktracker name to just the FQDN
-   * @param taskTrackerLong String The long format of the tasktracker string
-   * @return String The FQDN of the tasktracker
-   * @throws Exception
-   */
-  public static String getFQDNofTT (String taskTrackerLong) throws Exception {
-    //Getting the exact FQDN of the tasktracker from the tasktracker string.
-    String[] firstSplit = taskTrackerLong.split("_");
-    String tmpOutput = firstSplit[1];
-    String[] secondSplit = tmpOutput.split(":");
-    String tmpTaskTracker = secondSplit[0];
-    return tmpTaskTracker;
-  }
-
 }
diff --git 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java
 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java
index 1d1c083..28bae49 100644
--- 
a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java
+++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/MiniHadoopClusterManager.java
@@ -44,6 +44,7 @@ import 
org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.MiniYARNCluster;
 import org.eclipse.jetty.util.ajax.JSON;
+import com.google.common.net.HostAndPort;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -193,8 +194,8 @@ public class MiniHadoopClusterManager {
         map.put("namenode_port", dfs.getNameNodePort());
       }
       if (mr != null) {
-        map.put("resourcemanager_port", mr.getConfig().get(
-            YarnConfiguration.RM_ADDRESS).split(":")[1]);
+        map.put("resourcemanager_port", HostAndPort.fromString(
+            mr.getConfig().get(YarnConfiguration.RM_ADDRESS)).getPort());
       }
       FileWriter fw = new FileWriter(new File(writeDetails));
       fw.write(new JSON().toJSON(map));

---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscr...@hadoop.apache.org
For additional commands, e-mail: common-commits-h...@hadoop.apache.org

Reply via email to