This is an automated email from the ASF dual-hosted git repository.

ayushsaxena pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
     new 073b8ea1d51 HADOOP-18284. Remove Unnecessary semicolon ';'  (#4422). Contributed by fanshilun.
073b8ea1d51 is described below

commit 073b8ea1d510d81e822a4801b89ddc03b6f10e6c
Author: slfan1989 <55643692+slfan1...@users.noreply.github.com>
AuthorDate: Wed Jun 29 02:50:41 2022 -0700

    HADOOP-18284. Remove Unnecessary semicolon ';'  (#4422). Contributed by fanshilun.
---
 .../java/org/apache/hadoop/fs/TestLocalFileSystem.java   |  2 +-
 .../hadoop/io/file/tfile/TestTFileSeqFileComparison.java |  2 +-
 .../java/org/apache/hadoop/ipc/RPCCallBenchmark.java     |  2 +-
 .../hadoop/security/TestWhitelistBasedResolver.java      | 16 ++++++++--------
 .../hadoop/hdfs/qjournal/client/IPCLoggerChannel.java    |  4 ++--
 .../hadoop/hdfs/server/namenode/SecondaryNameNode.java   |  2 +-
 .../hadoop/hdfs/server/namenode/TestAddBlockRetry.java   |  2 +-
 .../server/namenode/TestCheckPointForSecurityTokens.java |  2 +-
 .../hadoop/hdfs/server/namenode/TestCheckpoint.java      |  2 +-
 .../hdfs/server/namenode/TestFavoredNodesEndToEnd.java   |  2 +-
 .../server/namenode/ha/TestDelegationTokensWithHA.java   |  2 +-
 .../src/main/java/org/apache/hadoop/mapred/MapTask.java  |  8 ++++----
 .../hadoop/mapreduce/lib/aggregate/LongValueMax.java     |  2 +-
 .../hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java      |  2 +-
 .../test/java/org/apache/hadoop/mapred/BigMapOutput.java |  2 +-
 .../org/apache/hadoop/mapred/NotificationTestCase.java   |  6 ++----
 .../org/apache/hadoop/mapreduce/TestValueIterReset.java  |  2 +-
 .../org/apache/hadoop/examples/pi/math/LongLong.java     |  2 +-
 .../org/apache/hadoop/yarn/client/cli/TestYarnCLI.java   |  2 +-
 .../impl/pb/ReservationAllocationStatePBImpl.java        |  2 +-
 .../hadoop/yarn/security/ConfiguredYarnAuthorizer.java   |  2 +-
 .../nodemanager/WindowsSecureContainerExecutor.java      |  2 +-
 .../capacity/ProportionalCapacityPreemptionPolicy.java   |  2 +-
 .../yarn/server/resourcemanager/TestAppManager.java      |  2 +-
 .../yarn/server/resourcemanager/TestRMAdminService.java  |  2 +-
 25 files changed, 37 insertions(+), 39 deletions(-)
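
For context (an editorial aside, not part of the patch itself): a doubled semicolon in Java parses as the original statement followed by an empty statement, so the code compiles and behaves identically; the extra ';' is dead syntax that style checkers such as checkstyle flag. Taking the first hunk below as an illustration:

    FSDataOutputStream out = fs.create(file);;  // before: trailing no-op empty statement
    FSDataOutputStream out = fs.create(file);   // after: same behavior, single terminator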

diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
index 949ea423315..29ef6ca6c7a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java
@@ -164,7 +164,7 @@ public class TestLocalFileSystem {
   public void testSyncable() throws IOException {
     FileSystem fs = fileSys.getRawFileSystem();
     Path file = new Path(TEST_ROOT_DIR, "syncable");
-    FSDataOutputStream out = fs.create(file);;
+    FSDataOutputStream out = fs.create(file);
     final int bytesWritten = 1;
     byte[] expectedBuf = new byte[] {'0', '1', '2', '3'};
     try {
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
index 48924203f34..fdd622a7f6a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/file/tfile/TestTFileSeqFileComparison.java
@@ -516,7 +516,7 @@ public class TestTFileSeqFileComparison {
   }
 
   private static class MyOptions {
-    String rootDir = GenericTestUtils.getTestDir().getAbsolutePath();;
+    String rootDir = GenericTestUtils.getTestDir().getAbsolutePath();
     String compress = "gz";
     String format = "tfile";
     int dictSize = 1000;
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java
index 6742425e766..cca40f97c57 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java
@@ -289,7 +289,7 @@ public class RPCCallBenchmark extends TestRpcBase implements Tool {
           long cpuNanosClient = getTotalCpuTime(ctx.getTestThreads());
           long cpuNanosServer = -1;
           if (server != null) {
-            cpuNanosServer = getTotalCpuTime(server.getHandlers());; 
+            cpuNanosServer = getTotalCpuTime(server.getHandlers());
           }
           System.out.println("====== Results ======");
           System.out.println("Options:\n" + opts);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestWhitelistBasedResolver.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestWhitelistBasedResolver.java
index 03fc4cbb54c..81abc42a023 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestWhitelistBasedResolver.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestWhitelistBasedResolver.java
@@ -111,16 +111,16 @@ public class TestWhitelistBasedResolver {
     assertEquals (wqr.getDefaultProperties(),
         wqr.getServerProperties(InetAddress.getByName("10.119.103.112")));
 
-    assertEquals (SASL_PRIVACY_PROPS, wqr.getServerProperties("10.119.103.113"));
+    assertEquals(SASL_PRIVACY_PROPS, wqr.getServerProperties("10.119.103.113"));
 
-    assertEquals (wqr.getDefaultProperties(), wqr.getServerProperties("10.221.103.121"));
+    assertEquals(wqr.getDefaultProperties(), wqr.getServerProperties("10.221.103.121"));
 
-    assertEquals (SASL_PRIVACY_PROPS, wqr.getServerProperties("10.221.104.0"));
-    assertEquals (SASL_PRIVACY_PROPS, wqr.getServerProperties("10.222.103.121"));
-    assertEquals (SASL_PRIVACY_PROPS, wqr.getServerProperties("10.223.104.0"));
-    assertEquals (SASL_PRIVACY_PROPS, wqr.getServerProperties("10.113.221.221"));
-    assertEquals (SASL_PRIVACY_PROPS, wqr.getServerProperties("10.113.221.222"));
-    assertEquals (wqr.getDefaultProperties(), wqr.getServerProperties("127.0.0.1"));;
+    assertEquals(SASL_PRIVACY_PROPS, wqr.getServerProperties("10.221.104.0"));
+    assertEquals(SASL_PRIVACY_PROPS, wqr.getServerProperties("10.222.103.121"));
+    assertEquals(SASL_PRIVACY_PROPS, wqr.getServerProperties("10.223.104.0"));
+    assertEquals(SASL_PRIVACY_PROPS, wqr.getServerProperties("10.113.221.221"));
+    assertEquals(SASL_PRIVACY_PROPS, wqr.getServerProperties("10.113.221.222"));
+    assertEquals(wqr.getDefaultProperties(), wqr.getServerProperties("127.0.0.1"));
 
     TestFileBasedIPList.removeFile("fixedwhitelist.txt");
     TestFileBasedIPList.removeFile("variablewhitelist.txt");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java
index 548525ba012..58c5ad39b99 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java
@@ -745,7 +745,7 @@ public class IPCLoggerChannel implements AsyncLogger {
       URI uri = URI.create(ret.getFromURL());
       httpServerURL = getHttpServerURI(uri.getScheme(), uri.getPort());
     } else {
-      httpServerURL = getHttpServerURI("http", ret.getHttpPort());;
+      httpServerURL = getHttpServerURI("http", ret.getHttpPort());
     }
   }
 
@@ -754,7 +754,7 @@ public class IPCLoggerChannel implements AsyncLogger {
       URI uri = URI.create(ret.getFromURL());
       httpServerURL = getHttpServerURI(uri.getScheme(), uri.getPort());
     } else {
-      httpServerURL = getHttpServerURI("http", ret.getHttpPort());;
+      httpServerURL = getHttpServerURI("http", ret.getHttpPort());
     }
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
index a0c8e9840f4..e95200b35aa 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java
@@ -803,7 +803,7 @@ public class SecondaryNameNode implements Runnable,
       geteditsizeOpt = new Option("geteditsize",
         "return the number of uncheckpointed transactions on the NameNode");
       checkpointOpt = OptionBuilder.withArgName("force")
-        .hasOptionalArg().withDescription("checkpoint on startup").create("checkpoint");;
+        .hasOptionalArg().withDescription("checkpoint on startup").create("checkpoint");
       formatOpt = new Option("format", "format the local storage during startup");
       helpOpt = new Option("h", "help", false, "get help information");
       
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAddBlockRetry.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAddBlockRetry.java
index 088a47e8936..1a763b5bae3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAddBlockRetry.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAddBlockRetry.java
@@ -99,7 +99,7 @@ public class TestAddBlockRetry {
                                             HdfsConstants.GRANDFATHER_INODE_ID,
                                             "clientName", null, onRetryBlock);
     } finally {
-      ns.readUnlock();;
+      ns.readUnlock();
     }
     DatanodeStorageInfo targets[] = FSDirWriteFileOp.chooseTargetForNewBlock(
         ns.getBlockManager(), src, null, null, null, r);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckPointForSecurityTokens.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckPointForSecurityTokens.java
index cff4e1f32f5..33fefca5fda 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckPointForSecurityTokens.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckPointForSecurityTokens.java
@@ -91,7 +91,7 @@ public class TestCheckPointForSecurityTokens {
         log.scanLog(Long.MAX_VALUE, true);
         long numTransactions = (log.getLastTxId() - log.getFirstTxId()) + 1;
         assertEquals("In-progress log " + log + " should have 5 transactions",
-                     5, numTransactions);;
+                     5, numTransactions);
       }
 
       // Saving image in safe mode should succeed
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java
index 064d5ae8395..1265863e115 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCheckpoint.java
@@ -2613,7 +2613,7 @@ public class TestCheckpoint {
   }
 
   private static CheckpointStorage spyOnSecondaryImage(SecondaryNameNode secondary1) {
-    CheckpointStorage spy = Mockito.spy((CheckpointStorage)secondary1.getFSImage());;
+    CheckpointStorage spy = Mockito.spy((CheckpointStorage)secondary1.getFSImage());
     secondary1.setFSImage(spy);
     return spy;
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFavoredNodesEndToEnd.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFavoredNodesEndToEnd.java
index f5a112c7acd..b8cc32e43e2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFavoredNodesEndToEnd.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFavoredNodesEndToEnd.java
@@ -150,7 +150,7 @@ public class TestFavoredNodesEndToEnd {
     d.stopDecommission();
 
     BlockLocation[] locations = getBlockLocations(p);
-    Assert.assertEquals(replication, locations[0].getNames().length);;
+    Assert.assertEquals(replication, locations[0].getNames().length);
     //also make sure that the datanode[0] is not in the list of hosts
     for (int i = 0; i < replication; i++) {
       final String loc = locations[0].getNames()[i];
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDelegationTokensWithHA.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDelegationTokensWithHA.java
index 2ef48a31f9d..bd54ba2e989 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDelegationTokensWithHA.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestDelegationTokensWithHA.java
@@ -317,7 +317,7 @@ public class TestDelegationTokensWithHA {
     longUgi.doAs(new PrivilegedExceptionAction<Void>() {
       @Override
       public Void run() throws Exception {
-        token.cancel(conf);;
+        token.cancel(conf);
         return null;
       }
     });
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
index fa4396d77f4..06d9fbbe7a3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/MapTask.java
@@ -150,8 +150,8 @@ public class MapTask extends Task {
    * @param <V>
    */
   class TrackedRecordReader<K, V> 
-      implements RecordReader<K,V> {
-    private RecordReader<K,V> rawIn;
+      implements RecordReader<K, V> {
+    private RecordReader<K, V> rawIn;
     private Counters.Counter fileInputByteCounter;
     private Counters.Counter inputRecordCounter;
     private TaskReporter reporter;
@@ -240,7 +240,7 @@ public class MapTask extends Task {
    * This class skips the records based on the failed ranges from previous 
    * attempts.
    */
-  class SkippingRecordReader<K, V> extends TrackedRecordReader<K,V> {
+  class SkippingRecordReader<K, V> extends TrackedRecordReader<K, V> {
     private SkipRangeIterator skipIt;
     private SequenceFile.Writer skipWriter;
     private boolean toWriteSkipRecs;
@@ -930,7 +930,7 @@ public class MapTask extends Task {
     // spill accounting
     private int maxRec;
     private int softLimit;
-    boolean spillInProgress;;
+    boolean spillInProgress;
     int bufferRemaining;
     volatile Throwable sortSpillException = null;
 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/aggregate/LongValueMax.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/aggregate/LongValueMax.java
index 90b4ae0f544..d574bf694f2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/aggregate/LongValueMax.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/aggregate/LongValueMax.java
@@ -96,7 +96,7 @@ public class LongValueMax implements ValueAggregator<String> {
    *         expected to be used by the a combiner.
    */
   public ArrayList<String> getCombinerOutput() {
-    ArrayList<String> retv = new ArrayList<String>(1);;
+    ArrayList<String> retv = new ArrayList<String>(1);
     retv.add("" + maxVal);
     return retv;
   }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java
index a00146e680b..aa3acfbc546 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java
@@ -123,7 +123,7 @@ public class HsTasksBlock extends HtmlBlock {
       long sortFinishTime = -1;
       long attemptFinishTime = -1;
       long elapsedShuffleTime = -1;
-      long elapsedSortTime = -1;;
+      long elapsedSortTime = -1;
       long elapsedReduceTime = -1;
       long attemptElapsed = -1;
       TaskAttempt successful = info.getSuccessful();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/BigMapOutput.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/BigMapOutput.java
index 35992f5de00..3aedfbf1ecb 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/BigMapOutput.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/BigMapOutput.java
@@ -70,7 +70,7 @@ public class BigMapOutput extends Configured implements Tool {
                                 BytesWritable.class, BytesWritable.class,
                                 CompressionType.NONE);
     long numBytesToWrite = fileSizeInMB * 1024 * 1024;
-    int minKeySize = conf.getInt(MIN_KEY, 10);;
+    int minKeySize = conf.getInt(MIN_KEY, 10);
     int keySizeRange = 
       conf.getInt(MAX_KEY, 1000) - minKeySize;
     int minValueSize = conf.getInt(MIN_VALUE, 0);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
index 5df1af5cce1..3372c8f28b6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/NotificationTestCase.java
@@ -179,8 +179,7 @@ public abstract class NotificationTestCase extends HadoopTestCase {
 
     // Hack for local FS that does not have the concept of a 'mounting point'
     if (isLocalFS()) {
-      String localPathRoot = System.getProperty("test.build.data","/tmp")
-        .toString().replace(' ', '+');;
+      String localPathRoot = System.getProperty("test.build.data", "/tmp").replace(' ', '+');
       inDir = new Path(localPathRoot, inDir);
       outDir = new Path(localPathRoot, outDir);
     }
@@ -217,8 +216,7 @@ public abstract class NotificationTestCase extends HadoopTestCase {
 
     // Hack for local FS that does not have the concept of a 'mounting point'
     if (isLocalFS()) {
-      String localPathRoot = System.getProperty("test.build.data","/tmp")
-        .toString().replace(' ', '+');;
+      String localPathRoot = System.getProperty("test.build.data", "/tmp").replace(' ', '+');
       inDir = new Path(localPathRoot, inDir);
       outDir = new Path(localPathRoot, outDir);
     }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java
index 4bcacd8ec48..343e81f1631 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestValueIterReset.java
@@ -437,7 +437,7 @@ public class TestValueIterReset {
     int count = 0;
 
     while (values.hasNext()) {
-      i = values.next();;
+      i = values.next();
       LOG.info(key + ":" + i);
       
       if (count == 5) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/LongLong.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/LongLong.java
index 8d7e7a0a971..8299ba161a8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/LongLong.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/LongLong.java
@@ -84,7 +84,7 @@ class LongLong {
     final long v = x1*y1;
 
     final long tmp = (t - u)>>>1;
-    result.d0 = ((t + u)>>>1) - v + ((tmp << MID) & FULL_MASK);;
+    result.d0 = ((t + u)>>>1) - v + ((tmp << MID) & FULL_MASK);
     result.d1 = v + (tmp >> MID);
     return result;
     */
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
index 621540677cf..22c7cc34bcf 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-client/src/test/java/org/apache/hadoop/yarn/client/cli/TestYarnCLI.java
@@ -1928,7 +1928,7 @@ public class TestYarnCLI {
     QueueCLI cli = createAndGetQueueCLI();
     when(client.getQueueInfo(any(String.class))).thenReturn(null);
     int result = cli.run(new String[] { "-status", queueName });
-    assertEquals(-1, result);;
+    assertEquals(-1, result);
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
     PrintWriter pw = new PrintWriter(baos);
     pw.println("Cannot get queue from RM by queueName = " + queueName
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ReservationAllocationStatePBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ReservationAllocationStatePBImpl.java
index 88e39ec994f..48f62c1bae7 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ReservationAllocationStatePBImpl.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ReservationAllocationStatePBImpl.java
@@ -62,7 +62,7 @@ import java.util.List;
 public class ReservationAllocationStatePBImpl extends
         ReservationAllocationState {
   private ReservationAllocationStateProto proto =
-          ReservationAllocationStateProto.getDefaultInstance();;
+          ReservationAllocationStateProto.getDefaultInstance();
   private ReservationAllocationStateProto.Builder builder = null;
   private boolean viaProto = false;
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ConfiguredYarnAuthorizer.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ConfiguredYarnAuthorizer.java
index 615ecb0106e..227f1c960a4 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ConfiguredYarnAuthorizer.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/security/ConfiguredYarnAuthorizer.java
@@ -43,7 +43,7 @@ public class ConfiguredYarnAuthorizer extends YarnAuthorizationProvider {
   private final ConcurrentMap<PrivilegedEntity, Map<AccessType, AccessControlList>>
       allAcls = new ConcurrentHashMap<>();
   private volatile AccessControlList adminAcl = null;
-  private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();;
+  private final ReentrantReadWriteLock lock = new ReentrantReadWriteLock();
   private final ReentrantReadWriteLock.ReadLock readLock = lock.readLock();
   private final ReentrantReadWriteLock.WriteLock writeLock =  lock.writeLock();
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java
index c13efadff1b..6132e579ef9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/WindowsSecureContainerExecutor.java
@@ -449,7 +449,7 @@ public class WindowsSecureContainerExecutor extends DefaultContainerExecutor {
       COMPLETE
     };
     
-    private State state;;
+    private State state;
     
     private final String cwd;
     private final String jobName;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/capacity/ProportionalCapacityPreemptionPolicy.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/capacity/ProportionalCapacityPreemptionPolicy.java
index 9439c538af5..1c4d60962ee 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/capacity/ProportionalCapacityPreemptionPolicy.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/monitor/capacity/ProportionalCapacityPreemptionPolicy.java
@@ -496,7 +496,7 @@ public class ProportionalCapacityPreemptionPolicy
     Map<ApplicationAttemptId, Set<RMContainer>> toPreempt =
         new HashMap<>();
     Map<PreemptionCandidatesSelector, Map<ApplicationAttemptId,
-        Set<RMContainer>>> toPreemptPerSelector =  new HashMap<>();;
+        Set<RMContainer>>> toPreemptPerSelector =  new HashMap<>();
     for (PreemptionCandidatesSelector selector :
         candidatesSelectionPolicies) {
       long startTime = 0;
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java
index cc5e8af1762..be0dbf2a1ba 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java
@@ -1772,7 +1772,7 @@ public class TestAppManager extends AppManagerTestBase{
       RecordFactory recordFactory) {
     ContainerLaunchContext amContainer = recordFactory.newRecordInstance(
         ContainerLaunchContext.class);
-    amContainer.setApplicationACLs(new HashMap<ApplicationAccessType, String>());;
+    amContainer.setApplicationACLs(new HashMap<ApplicationAccessType, String>());
     return amContainer;
   }
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java
index bc540b0ba7f..457e9d47dd0 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java
@@ -109,7 +109,7 @@ import static org.junit.Assert.assertTrue;
 
 public class TestRMAdminService {
 
-  private Configuration configuration;;
+  private Configuration configuration;
   private MockRM rm = null;
   private FileSystem fs;
   private Path workingPath;

