Author: wheat9
Date: Wed Mar  5 00:28:21 2014
New Revision: 1574270

URL: http://svn.apache.org/r1574270
Log:
HDFS-5321. Clean up the HTTP-related configuration in HDFS. Contributed by Haohui Mai.

Modified:
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/SWebHdfsFileSystem.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
    hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpFileSystem.java

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java?rev=1574270&r1=1574269&r2=1574270&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/client/HttpFSFileSystem.java Wed Mar  5 00:28:21 2014
@@ -329,8 +329,7 @@ public class HttpFSFileSystem extends Fi
    */
   @Override
   protected int getDefaultPort() {
-    return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY,
-        DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT);
+    return DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT;
   }
 
   /**

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt?rev=1574270&r1=1574269&r2=1574270&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt Wed Mar  5 00:28:21 2014
@@ -517,6 +517,8 @@ Release 2.4.0 - UNRELEASED
 
     HDFS-6046. add dfs.client.mmap.enabled (cmccabe)
 
+    HDFS-5321. Clean up the HTTP-related configuration in HDFS (wheat9)
+
   OPTIMIZATIONS
 
     HDFS-5790. LeaseManager.findPath is very slow when many leases need recovery

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java?rev=1574270&r1=1574269&r2=1574270&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java Wed Mar  5 00:28:21 2014
@@ -123,7 +123,6 @@ public class DFSConfigKeys extends Commo
     "dfs.namenode.path.based.cache.block.map.allocation.percent";
   public static final float    DFS_NAMENODE_PATH_BASED_CACHE_BLOCK_MAP_ALLOCATION_PERCENT_DEFAULT = 0.25f;
 
-  public static final String  DFS_NAMENODE_HTTP_PORT_KEY = "dfs.http.port";
   public static final int     DFS_NAMENODE_HTTP_PORT_DEFAULT = 50070;
   public static final String  DFS_NAMENODE_HTTP_ADDRESS_KEY = "dfs.namenode.http-address";
   public static final String  DFS_NAMENODE_HTTP_ADDRESS_DEFAULT = "0.0.0.0:" + DFS_NAMENODE_HTTP_PORT_DEFAULT;
@@ -294,7 +293,6 @@ public class DFSConfigKeys extends Commo
 
   //Following keys have no defaults
   public static final String  DFS_DATANODE_DATA_DIR_KEY = "dfs.datanode.data.dir";
-  public static final String  DFS_NAMENODE_HTTPS_PORT_KEY = "dfs.https.port";
   public static final int     DFS_NAMENODE_HTTPS_PORT_DEFAULT = 50470;
   public static final String  DFS_NAMENODE_HTTPS_ADDRESS_KEY = "dfs.namenode.https-address";
   public static final String  DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT = "0.0.0.0:" + DFS_NAMENODE_HTTPS_PORT_DEFAULT;

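A note on the remaining configuration surface: with dfs.http.port and
dfs.https.port gone, a non-default port can only be expressed through the
combined address keys that survive above. A minimal sketch of setting them
programmatically; the host name is a placeholder, not a value from this
commit:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;

    public class HttpAddressConfig {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // The port now travels with the address keys kept by this commit.
        conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,   // dfs.namenode.http-address
            "nn.example.com:50070");
        conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,  // dfs.namenode.https-address
            "nn.example.com:50470");
      }
    }
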
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java?rev=1574270&r1=1574269&r2=1574270&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java Wed Mar  5 00:28:21 2014
@@ -42,7 +42,6 @@ import java.net.URI;
 import java.net.URISyntaxException;
 import java.security.SecureRandom;
 import java.text.SimpleDateFormat;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
@@ -731,46 +730,6 @@ public class DFSUtil {
   }
 
   /**
-   * Resolve an HDFS URL into real INetSocketAddress. It works like a DNS resolver
-   * when the URL points to an non-HA cluster. When the URL points to an HA
-   * cluster, the resolver further resolves the logical name (i.e., the authority
-   * in the URL) into real namenode addresses.
-   */
-  public static InetSocketAddress[] resolveWebHdfsUri(URI uri, Configuration conf)
-      throws IOException {
-    int defaultPort;
-    String scheme = uri.getScheme();
-    if (WebHdfsFileSystem.SCHEME.equals(scheme)) {
-      defaultPort = DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT;
-    } else if (SWebHdfsFileSystem.SCHEME.equals(scheme)) {
-      defaultPort = DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT;
-    } else {
-      throw new IllegalArgumentException("Unsupported scheme: " + scheme);
-    }
-
-    ArrayList<InetSocketAddress> ret = new ArrayList<InetSocketAddress>();
-
-    if (!HAUtil.isLogicalUri(conf, uri)) {
-      InetSocketAddress addr = NetUtils.createSocketAddr(uri.getAuthority(),
-          defaultPort);
-      ret.add(addr);
-
-    } else {
-      Map<String, Map<String, InetSocketAddress>> addresses = DFSUtil
-          .getHaNnWebHdfsAddresses(conf, scheme);
-
-      for (Map<String, InetSocketAddress> addrs : addresses.values()) {
-        for (InetSocketAddress addr : addrs.values()) {
-          ret.add(addr);
-        }
-      }
-    }
-
-    InetSocketAddress[] r = new InetSocketAddress[ret.size()];
-    return ret.toArray(r);
-  }
-  
-  /**
    * Returns list of InetSocketAddress corresponding to  backup node rpc 
   * addresses from the configuration.
    * 

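The deleted resolveWebHdfsUri() is not dropped outright; it resurfaces below
as the private WebHdfsFileSystem#resolveNNAddr(). Both versions delegate the
non-HA case to NetUtils.createSocketAddr, which applies the default port only
when the authority omits one. A small illustration, with a hypothetical host
name:

    import java.net.InetSocketAddress;
    import org.apache.hadoop.net.NetUtils;

    public class DefaultPortRule {
      public static void main(String[] args) {
        // No port in the authority: the supplied default is used.
        InetSocketAddress a = NetUtils.createSocketAddr("nn.example.com", 50070);
        // Explicit port: the default is ignored.
        InetSocketAddress b = NetUtils.createSocketAddr("nn.example.com:8443", 50070);
        System.out.println(a.getPort());  // 50070
        System.out.println(b.getPort());  // 8443
      }
    }
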
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java?rev=1574270&r1=1574269&r2=1574270&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HftpFileSystem.java Wed Mar  5 00:28:21 2014
@@ -123,8 +123,7 @@ public class HftpFileSystem extends File
 
   @Override
   protected int getDefaultPort() {
-    return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY,
-        DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT);
+    return DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT;
   }
 
   /**

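The user-visible consequence of this getDefaultPort() change: a custom port
must now ride in the file system URI, since no configuration key overrides the
default any longer. A sketch mirroring the retained tests further down; it
sits in package org.apache.hadoop.hdfs.web only because getDefaultPort() is
protected:

    package org.apache.hadoop.hdfs.web;

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class HftpPortDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        HftpFileSystem fs = (HftpFileSystem)
            FileSystem.get(URI.create("hftp://localhost:123"), conf);
        System.out.println(fs.getDefaultPort());          // always 50070 now
        System.out.println(fs.getCanonicalServiceName()); // 127.0.0.1:123
      }
    }
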
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java?rev=1574270&r1=1574269&r2=1574270&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/HsftpFileSystem.java Wed Mar  5 00:28:21 2014
@@ -64,7 +64,6 @@ public class HsftpFileSystem extends Hft
 
   @Override
   protected int getDefaultPort() {
-    return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY,
-                            DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT);
+    return DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT;
   }
 }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/SWebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/SWebHdfsFileSystem.java?rev=1574270&r1=1574269&r2=1574270&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/SWebHdfsFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/SWebHdfsFileSystem.java Wed Mar  5 00:28:21 2014
@@ -42,7 +42,6 @@ public class SWebHdfsFileSystem extends 
 
   @Override
   protected int getDefaultPort() {
-    return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY,
-        DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT);
+    return DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT;
   }
 }

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java?rev=1574270&r1=1574269&r2=1574270&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java Wed Mar  5 00:28:21 2014
@@ -29,6 +29,7 @@ import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URL;
 import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 import java.util.StringTokenizer;
@@ -172,7 +173,7 @@ public class WebHdfsFileSystem extends F
 
     ugi = UserGroupInformation.getCurrentUser();
     this.uri = URI.create(uri.getScheme() + "://" + uri.getAuthority());
-    this.nnAddrs = DFSUtil.resolveWebHdfsUri(this.uri, conf);
+    this.nnAddrs = resolveNNAddr();
 
     boolean isHA = HAUtil.isLogicalUri(conf, this.uri);
     // In non-HA case, the code needs to call getCanonicalUri() in order to
@@ -237,8 +238,7 @@ public class WebHdfsFileSystem extends F
 
   @Override
   protected int getDefaultPort() {
-    return getConf().getInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY,
-        DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT);
+    return DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT;
   }
 
   @Override
@@ -1082,4 +1082,36 @@ public class WebHdfsFileSystem extends F
     final Map<?, ?> m = run(op, p);
     return JsonUtil.toMD5MD5CRC32FileChecksum(m);
   }
+
+  /**
+   * Resolve an HDFS URL into real INetSocketAddress. It works like a DNS
+   * resolver when the URL points to an non-HA cluster. When the URL points to
+   * an HA cluster, the resolver further resolves the logical name (i.e., the
+   * authority in the URL) into real namenode addresses.
+   */
+  private InetSocketAddress[] resolveNNAddr() throws IOException {
+    Configuration conf = getConf();
+    final String scheme = uri.getScheme();
+
+    ArrayList<InetSocketAddress> ret = new ArrayList<InetSocketAddress>();
+
+    if (!HAUtil.isLogicalUri(conf, uri)) {
+      InetSocketAddress addr = NetUtils.createSocketAddr(uri.getAuthority(),
+          getDefaultPort());
+      ret.add(addr);
+
+    } else {
+      Map<String, Map<String, InetSocketAddress>> addresses = DFSUtil
+          .getHaNnWebHdfsAddresses(conf, scheme);
+
+      for (Map<String, InetSocketAddress> addrs : addresses.values()) {
+        for (InetSocketAddress addr : addrs.values()) {
+          ret.add(addr);
+        }
+      }
+    }
+
+    InetSocketAddress[] r = new InetSocketAddress[ret.size()];
+    return ret.toArray(r);
+  }
 }

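In the HA branch, resolveNNAddr() expands the logical authority via
DFSUtil.getHaNnWebHdfsAddresses(), which is public and can be exercised on its
own. A sketch under assumed HA settings; the nameservice ns1 and the
example.com hosts are illustrative, echoing the test removed below:

    import java.net.InetSocketAddress;
    import java.util.Map;
    import org.apache.hadoop.hdfs.DFSUtil;
    import org.apache.hadoop.hdfs.HdfsConfiguration;

    public class HaWebHdfsResolve {
      public static void main(String[] args) {
        HdfsConfiguration conf = new HdfsConfiguration();
        conf.set("dfs.nameservices", "ns1");
        conf.set("dfs.ha.namenodes.ns1", "nn1,nn2");
        conf.set("dfs.namenode.http-address.ns1.nn1", "ns1-nn1.example.com:50070");
        conf.set("dfs.namenode.http-address.ns1.nn2", "ns1-nn2.example.com:50070");

        // For the "webhdfs" scheme this gathers every namenode's HTTP address.
        Map<String, Map<String, InetSocketAddress>> addrs =
            DFSUtil.getHaNnWebHdfsAddresses(conf, "webhdfs");
        System.out.println(addrs);  // one entry for ns1, mapping nn1 and nn2
      }
    }
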
Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java?rev=1574270&r1=1574269&r2=1574270&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java Wed Mar  5 00:28:21 2014
@@ -32,7 +32,6 @@ import static org.apache.hadoop.hdfs.DFS
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMESERVICE_ID;
 import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains;
 import static org.hamcrest.CoreMatchers.not;
-import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
@@ -579,25 +578,6 @@ public class TestDFSUtil {
     assertEquals(NS1_NN2_ADDR, map.get("ns1").get("nn2").toString());
   }
 
-  @Test
-  public void testResolve() throws IOException, URISyntaxException {
-    final String LOGICAL_HOST_NAME = "ns1";
-    final String NS1_NN1_HOST      = "ns1-nn1.example.com";
-    final String NS1_NN2_HOST      = "ns1-nn2.example.com";
-    final String NS1_NN1_ADDR      = "ns1-nn1.example.com:8020";
-    final String NS1_NN2_ADDR      = "ns1-nn2.example.com:8020";
-    final int DEFAULT_PORT         = NameNode.DEFAULT_PORT;
-
-    Configuration conf = createWebHDFSHAConfiguration(LOGICAL_HOST_NAME, NS1_NN1_ADDR, NS1_NN2_ADDR);
-    URI uri = new URI("webhdfs://ns1");
-    assertTrue(HAUtil.isLogicalUri(conf, uri));
-    InetSocketAddress[] addrs = DFSUtil.resolveWebHdfsUri(uri, conf);
-    assertArrayEquals(new InetSocketAddress[] {
-      new InetSocketAddress(NS1_NN1_HOST, DEFAULT_PORT),
-      new InetSocketAddress(NS1_NN2_HOST, DEFAULT_PORT),
-    }, addrs);
-  }
-
   private static Configuration createWebHDFSHAConfiguration(String logicalHostName, String nnaddr1, String nnaddr2) {
     HdfsConfiguration conf = new HdfsConfiguration();
 

Modified: hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpFileSystem.java?rev=1574270&r1=1574269&r2=1574270&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpFileSystem.java (original)
+++ hadoop/common/trunk/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHftpFileSystem.java Wed Mar  5 00:28:21 2014
@@ -311,23 +311,6 @@ public class TestHftpFileSystem {
   }
 
   @Test
-  public void testHftpCustomDefaultPorts() throws IOException {
-    Configuration conf = new Configuration();
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, 123);
-
-    URI uri = URI.create("hftp://localhost");
-    HftpFileSystem fs = (HftpFileSystem) FileSystem.get(uri, conf);
-
-    assertEquals(123, fs.getDefaultPort());
-
-    assertEquals(uri, fs.getUri());
-
-    // HFTP uses http to get the token so canonical service name should
-    // return the http port.
-    assertEquals("127.0.0.1:123", fs.getCanonicalServiceName());
-  }
-
-  @Test
   public void testHftpCustomUriPortWithDefaultPorts() throws IOException {
     Configuration conf = new Configuration();
    URI uri = URI.create("hftp://localhost:123");
@@ -343,12 +326,11 @@ public class TestHftpFileSystem {
   @Test
   public void testHftpCustomUriPortWithCustomDefaultPorts() throws IOException {
     Configuration conf = new Configuration();
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, 123);
-
    URI uri = URI.create("hftp://localhost:789");
     HftpFileSystem fs = (HftpFileSystem) FileSystem.get(uri, conf);
 
-    assertEquals(123, fs.getDefaultPort());
+    assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_DEFAULT,
+        fs.getDefaultPort());
 
     assertEquals(uri, fs.getUri());
     assertEquals("127.0.0.1:789", fs.getCanonicalServiceName());
@@ -383,20 +365,6 @@ public class TestHftpFileSystem {
         fs.getCanonicalServiceName());
   }
 
-  @Test
-  public void testHsftpCustomDefaultPorts() throws IOException {
-    Configuration conf = new Configuration();
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, 123);
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 456);
-
-    URI uri = URI.create("hsftp://localhost");
-    HsftpFileSystem fs = (HsftpFileSystem) FileSystem.get(uri, conf);
-
-    assertEquals(456, fs.getDefaultPort());
-
-    assertEquals(uri, fs.getUri());
-    assertEquals("127.0.0.1:456", fs.getCanonicalServiceName());
-  }
 
   @Test
   public void testHsftpCustomUriPortWithDefaultPorts() throws IOException {
@@ -414,13 +382,12 @@ public class TestHftpFileSystem {
   @Test
   public void testHsftpCustomUriPortWithCustomDefaultPorts() throws IOException {
     Configuration conf = new Configuration();
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, 123);
-    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 456);
 
    URI uri = URI.create("hsftp://localhost:789");
     HsftpFileSystem fs = (HsftpFileSystem) FileSystem.get(uri, conf);
 
-    assertEquals(456, fs.getDefaultPort());
+    assertEquals(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_DEFAULT,
+        fs.getDefaultPort());
 
     assertEquals(uri, fs.getUri());
     assertEquals("127.0.0.1:789", fs.getCanonicalServiceName());

