Repository: hadoop
Updated Branches:
  refs/heads/trunk 742f9d90c -> 9ae7f9eb7


http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java
index fa3708e..2c69542 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java
@@ -181,7 +181,7 @@ public static final String OUTDIR = "mapreduce.output.fileoutputformat.outputdir
    *  Get the {@link Path} to the task's temporary output directory 
    *  for the map-reduce job
    *  
-   * <h4 id="SideEffectFiles">Tasks' Side-Effect Files</h4>
+   * <b id="SideEffectFiles">Tasks' Side-Effect Files</b>
    * 
    * <p>Some applications need to create/write-to side-files, which differ from
    * the actual job-outputs.
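
Doclint (javadoc's HTML checker, on by default since Java 8) rejects out-of-sequence heading tags such as <h4> inside a member-level comment, which is presumably what motivates the change above. An inline <b> element carrying the same id keeps both the bold rendering and the HTML anchor. The resulting comment shape, sketched from the context lines above:

    /**
     * Get the {@link Path} to the task's temporary output directory
     * for the map-reduce job.
     *
     * <b id="SideEffectFiles">Tasks' Side-Effect Files</b>
     *
     * <p>Some applications need to create/write-to side-files, which differ
     * from the actual job-outputs.
     */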

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/MultipleOutputs.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/MultipleOutputs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/MultipleOutputs.java
index 24baa59..c31cab7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/MultipleOutputs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/output/MultipleOutputs.java
@@ -81,7 +81,7 @@ import java.util.*;
  * <p>
  * Usage in Reducer:
  * <pre>
- * <K, V> String generateFileName(K k, V v) {
+ * &lt;K, V&gt; String generateFileName(K k, V v) {
  *   return k.toString() + "_" + v.toString();
  * }
  * 
@@ -124,16 +124,16 @@ import java.util.*;
  * </p>
  * 
  * <pre>
- * private MultipleOutputs<Text, Text> out;
+ * private MultipleOutputs&lt;Text, Text&gt; out;
  * 
  * public void setup(Context context) {
- *   out = new MultipleOutputs<Text, Text>(context);
+ *   out = new MultipleOutputs&lt;Text, Text&gt;(context);
  *   ...
  * }
  * 
- * public void reduce(Text key, Iterable<Text> values, Context context) throws IOException, InterruptedException {
+ * public void reduce(Text key, Iterable&lt;Text&gt; values, Context context) throws IOException, InterruptedException {
  * for (Text t : values) {
- *   out.write(key, t, generateFileName(<<i>parameter list...</i>>));
+ *   out.write(key, t, generateFileName(&lt;<i>parameter list...</i>&gt;));
  *   }
  * }
  * 
@@ -294,7 +294,6 @@ public class MultipleOutputs<KEYOUT, VALUEOUT> {
 
   /**
    * Adds a named output for the job.
-   * <p/>
    *
    * @param job               job to add the named output
    * @param namedOutput       named output name, it has to be a word, letters
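
The escaping changes in this file all address the same doclint failure: inside a doc comment, a bare generic parameter list such as <Text, Text> parses as an unknown HTML tag. Writing the angle brackets as &lt; and &gt; renders them literally; the inline {@code ...} tag is an equivalent, more readable alternative. Only comment text needs this treatment; declarations keep real generics. A sketch adapted from the snippet above (the second field is illustrative, not from the patch):

    /** Declared as MultipleOutputs&lt;Text, Text&gt; (entity-escaped form). */
    private MultipleOutputs<Text, Text> out;

    /** Declared as {@code MultipleOutputs<Text, Text>} (inline-tag form). */
    private MultipleOutputs<Text, Text> out2;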

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/BinaryPartitioner.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/BinaryPartitioner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/BinaryPartitioner.java
index 4a40840..2a89908 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/BinaryPartitioner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/partition/BinaryPartitioner.java
@@ -64,7 +64,7 @@ import org.apache.hadoop.mapreduce.Partitioner;
  *   <li>{@link #setOffsets}</li>
  *   <li>{@link #setLeftOffset}</li>
  *   <li>{@link #setRightOffset}</li>
- * </ul></p>
+ * </ul>
  */
 @InterfaceAudience.Public
 @InterfaceStability.Evolving

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java
index 247c2f2..b9014ef 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/task/JobContextImpl.java
@@ -374,7 +374,6 @@ public class JobContextImpl implements JobContext {
    * Get the timestamps of the archives.  Used by internal
    * DistributedCache and MapReduce code.
    * @return a string array of timestamps 
-   * @throws IOException
    */
   public String[] getArchiveTimestamps() {
     return toTimestampStrs(DistributedCache.getArchiveTimestamps(conf));
@@ -384,7 +383,6 @@ public class JobContextImpl implements JobContext {
    * Get the timestamps of the files.  Used by internal
    * DistributedCache and MapReduce code.
    * @return a string array of timestamps 
-   * @throws IOException
    */
   public String[] getFileTimestamps() {
     return toTimestampStrs(DistributedCache.getFileTimestamps(conf));

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomTextWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomTextWriter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomTextWriter.java
index 40e101a..6cb3211 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomTextWriter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomTextWriter.java
@@ -42,7 +42,7 @@ import org.apache.hadoop.util.ToolRunner;
  * random sequence of words.
  * In order for this program to generate data for terasort with a 5-10 words
  * per key and 20-100 words per value, have the following config:
- * <xmp>
+ * <pre>{@code
  * <?xml version="1.0"?>
  * <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
  * <configuration>
@@ -66,7 +66,7 @@ import org.apache.hadoop.util.ToolRunner;
  *     <name>mapreduce.randomtextwriter.totalbytes</name>
  *     <value>1099511627776</value>
  *   </property>
- * </configuration></xmp>
+ * </configuration>}</pre>
  * 
  * Equivalently, {@link RandomTextWriter} also supports all the above options
  * and ones supported by {@link Tool} via the command-line.
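
The <xmp> element was dropped from HTML and doclint refuses it outright. <pre>{@code ...}</pre> gives the same show-the-markup-literally effect with supported constructs, because {@code} suppresses both HTML and nested-tag interpretation, so the XML needs no entity escaping. A minimal sketch reusing a property from the hunk above:

    /**
     * ... have the following config:
     * <pre>{@code
     * <configuration>
     *   <property>
     *     <name>mapreduce.randomtextwriter.totalbytes</name>
     *     <value>1099511627776</value>
     *   </property>
     * </configuration>
     * }</pre>
     */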

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomWriter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomWriter.java
index a326c8c..67c9ca8 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomWriter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/RandomWriter.java
@@ -47,7 +47,7 @@ import org.apache.hadoop.util.ToolRunner;
  * random binary sequence file of BytesWritable.
  * In order for this program to generate data for terasort with 10-byte keys
  * and 90-byte values, have the following config:
- * <xmp>
+ * <pre>{@code
  * <?xml version="1.0"?>
  * <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
  * <configuration>
@@ -71,8 +71,7 @@ import org.apache.hadoop.util.ToolRunner;
  *     <name>mapreduce.randomwriter.totalbytes</name>
  *     <value>1099511627776</value>
  *   </property>
- * </configuration></xmp>
- * 
+ * </configuration>}</pre>
  * Equivalently, {@link RandomWriter} also supports all the above options
  * and ones supported by {@link GenericOptionsParser} via the command-line.
  */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/MultiFileWordCount.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/MultiFileWordCount.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/MultiFileWordCount.java
index d3df4b3..b51946e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/MultiFileWordCount.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/MultiFileWordCount.java
@@ -199,7 +199,7 @@ public class MultiFileWordCount extends Configured implements Tool {
   }
 
   /**
-   * This Mapper is similar to the one in {@link WordCount.MapClass}.
+   * This Mapper is similar to the one in {@link WordCount.TokenizerMapper}.
    */
   public static class MapClass extends 
       Mapper<WordOffset, Text, Text, IntWritable> {
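
Doclint also fails the build on {@link} targets that do not resolve. The old-API example class WordCount.MapClass no longer exists; the current example's mapper is WordCount.TokenizerMapper, so the reference is retargeted rather than escaped:

    /**
     * This Mapper is similar to the one in {@link WordCount.TokenizerMapper}.
     */
    public static class MapClass extends
        Mapper<WordOffset, Text, Text, IntWritable> {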

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/QuasiMonteCarlo.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/QuasiMonteCarlo.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/QuasiMonteCarlo.java
index d565098..25dee6b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/QuasiMonteCarlo.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/QuasiMonteCarlo.java
@@ -50,7 +50,7 @@ import org.apache.hadoop.util.ToolRunner;
  * where $S=[0,1)^2$ is a unit square,
  * $x=(x_1,x_2)$ is a 2-dimensional point,
  * and $f$ is a function describing the inscribed circle of the square $S$,
- * $f(x)=1$ if $(2x_1-1)^2+(2x_2-1)^2 <= 1$ and $f(x)=0$, otherwise.
+ * $f(x)=1$ if $(2x_1-1)^2+(2x_2-1)^2 &lt;= 1$ and $f(x)=0$, otherwise.
  * It is easy to see that Pi is equal to $4I$.
  * So an approximation of Pi is obtained once $I$ is evaluated numerically.
  * 
@@ -155,7 +155,7 @@ public class QuasiMonteCarlo extends Configured implements Tool {
     /** Map method.
      * @param offset samples starting from the (offset+1)th sample.
      * @param size the number of samples for this map
-     * @param context output {ture->numInside, false->numOutside}
+     * @param context output {true-&gt;numInside, false-&gt;numOutside}
      */
     public void map(LongWritable offset,
                     LongWritable size,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomTextWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomTextWriter.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomTextWriter.java
index 4d555c6..6309ee6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomTextWriter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomTextWriter.java
@@ -42,7 +42,7 @@ import org.apache.hadoop.util.ToolRunner;
  * random sequence of words.
  * In order for this program to generate data for terasort with a 5-10 words
  * per key and 20-100 words per value, have the following config:
- * <xmp>
+ * <pre>{@code
  * <?xml version="1.0"?>
  * <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
  * <configuration>
@@ -66,7 +66,7 @@ import org.apache.hadoop.util.ToolRunner;
  *     <name>mapreduce.randomtextwriter.totalbytes</name>
  *     <value>1099511627776</value>
  *   </property>
- * </configuration></xmp>
+ * </configuration>}</pre>
  * 
  * Equivalently, {@link RandomTextWriter} also supports all the above options
  * and ones supported by {@link Tool} via the command-line.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomWriter.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomWriter.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomWriter.java
index e1c13ec..8f322b1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomWriter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/RandomWriter.java
@@ -47,7 +47,7 @@ import org.apache.hadoop.util.ToolRunner;
  * random binary sequence file of BytesWritable.
  * In order for this program to generate data for terasort with 10-byte keys
  * and 90-byte values, have the following config:
- * <xmp>
+ * <pre>{@code
  * <?xml version="1.0"?>
  * <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
  * <configuration>
@@ -71,8 +71,7 @@ import org.apache.hadoop.util.ToolRunner;
  *     <name>mapreduce.randomwriter.totalbytes</name>
  *     <value>1099511627776</value>
  *   </property>
- * </configuration></xmp>
- * 
+ * </configuration>}</pre>
  * Equivalently, {@link RandomWriter} also supports all the above options
  * and ones supported by {@link GenericOptionsParser} via the command-line.
  */

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/SecondarySort.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/SecondarySort.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/SecondarySort.java
index d536ec9..8841fdc 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/SecondarySort.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/SecondarySort.java
@@ -74,7 +74,7 @@ public class SecondarySort {
     }
     /**
      * Read the two integers. 
-     * Encoded as: MIN_VALUE -> 0, 0 -> -MIN_VALUE, MAX_VALUE-> -1
+     * Encoded as: MIN_VALUE -&gt; 0, 0 -&gt; -MIN_VALUE, MAX_VALUE-&gt; -1
      */
     @Override
     public void readFields(DataInput in) throws IOException {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/DistBbp.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/DistBbp.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/DistBbp.java
index 4484d20..268066c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/DistBbp.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/DistBbp.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.util.ToolRunner;
  * A map/reduce program that uses a BBP-type method to compute exact 
  * binary digits of Pi.
  * This program is designed for computing the n th bit of Pi,
- * for large n, say n >= 10^8.
+ * for large n, say n &gt;= 10^8.
  * For computing lower bits of Pi, consider using bbp.
  *
  * The actually computation is done by DistSum jobs.

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Modular.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Modular.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Modular.java
index 58f859d..1c039a2 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Modular.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/pi/math/Modular.java
@@ -78,7 +78,7 @@ public class Modular {
     return x >= 1? x - 1: x < 0? x + 1: x;
   }
 
-  /** Given 0 < x < y,
+  /** Given 0 &lt; x &lt; y,
    * return x^(-1) mod y.
    */
   public static long modInverse(final long x, final long y) {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/GenSort.java
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/GenSort.java b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/GenSort.java
index 94f9baa..beb0743 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/GenSort.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-examples/src/main/java/org/apache/hadoop/examples/terasort/GenSort.java
@@ -28,7 +28,7 @@ import org.apache.hadoop.util.PureJavaCrc32;
 
 /** 
  * A single process data generator for the terasort data. Based on gensort.c 
- * version 1.1 (3 Mar 2009) from Chris Nyberg <chris.nyb...@ordinal.com>.
+ * version 1.1 (3 Mar 2009) from Chris Nyberg &lt;chris.nyb...@ordinal.com&gt;.
  */
 public class GenSort {
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
index ab5b802..a7b68a9 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
@@ -38,10 +38,10 @@ import com.google.common.collect.Sets;
 /**
  * The CopyListing abstraction is responsible for how the list of
  * sources and targets is constructed, for DistCp's copy function.
- * The copy-listing should be a SequenceFile<Text, CopyListingFileStatus>,
- * located at the path specified to buildListing(),
- * each entry being a pair of (Source relative path, source file status),
- * all the paths being fully qualified.
+ * The copy-listing should be a
+ * SequenceFile&lt;Text, CopyListingFileStatus&gt;, located at the path
+ * specified to buildListing(), each entry being a pair of (Source relative
+ * path, source file status), all the paths being fully qualified.
  */
 public abstract class CopyListing extends Configured {
 
@@ -95,8 +95,8 @@ public abstract class CopyListing extends Configured {
    * Validate input and output paths
    *
    * @param options - Input options
-   * @throws InvalidInputException: If inputs are invalid
-   * @throws IOException: any Exception with FS 
+   * @throws InvalidInputException If inputs are invalid
+   * @throws IOException any Exception with FS
    */
   protected abstract void validatePaths(DistCpOptions options)
       throws IOException, InvalidInputException;
@@ -105,7 +105,7 @@ public abstract class CopyListing extends Configured {
   * The interface to be implemented by sub-classes, to create the source/target file listing.
    * @param pathToListFile Path on HDFS where the listing file is written.
    * @param options Input Options for DistCp (indicating source/target paths.)
-   * @throws IOException: Thrown on failure to create the listing file.
+   * @throws IOException Thrown on failure to create the listing file.
    */
   protected abstract void doBuildListing(Path pathToListFile,
                                          DistCpOptions options) throws IOException;
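
The @throws fixes in this file follow from the tag's grammar: "@throws exception-class description", with nothing between the class name and the description. A trailing colon, as in "@throws IOException:", leaves the tooling trying to resolve "IOException:" as a type name. The corrected form, as in the hunk above:

    /**
     * Validate input and output paths.
     *
     * @param options - Input options
     * @throws InvalidInputException If inputs are invalid
     * @throws IOException any Exception with FS
     */
    protected abstract void validatePaths(DistCpOptions options)
        throws IOException, InvalidInputException;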

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
index d202f0a..28535a7 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
@@ -76,7 +76,7 @@ public class DistCp extends Configured implements Tool {
    * (E.g. source-paths, target-location, etc.)
    * @param inputOptions Options (indicating source-paths, target-location.)
   * @param configuration The Hadoop configuration against which the Copy-mapper must run.
-   * @throws Exception, on failure.
+   * @throws Exception
    */
  public DistCp(Configuration configuration, DistCpOptions inputOptions) throws Exception {
     Configuration config = new Configuration(configuration);
@@ -142,7 +142,7 @@ public class DistCp extends Configured implements Tool {
    * Implements the core-execution. Creates the file-list for copy,
    * and launches the Hadoop-job, to do the copy.
    * @return Job handle
-   * @throws Exception, on failure.
+   * @throws Exception
    */
   public Job execute() throws Exception {
     assert inputOptions != null;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
index d263f82..159d5ca 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCpOptionSwitch.java
@@ -105,7 +105,7 @@ public enum DistCpOptionSwitch {
    * Copy all the source files and commit them atomically to the target
    * This is typically useful in cases where there is a process
    * polling for availability of a file/dir. This option is incompatible
-   * with SYNC_FOLDERS & DELETE_MISSING
+   * with SYNC_FOLDERS and DELETE_MISSING
    */
   ATOMIC_COMMIT(DistCpConstants.CONF_LABEL_ATOMIC_COPY,
       new Option("atomic", false, "Commit all changes or none")),

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java
index 4bbc30d..525136c 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java
@@ -63,7 +63,7 @@ public class OptionsParser {
    * @param args Command-line arguments (excluding the options consumed
    *              by the GenericOptionsParser).
    * @return The Options object, corresponding to the specified command-line.
-   * @throws IllegalArgumentException: Thrown if the parse fails.
+   * @throws IllegalArgumentException Thrown if the parse fails.
    */
  public static DistCpOptions parse(String args[]) throws IllegalArgumentException {
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
index 197edd9..d5fdd7f 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyCommitter.java
@@ -70,7 +70,7 @@ public class CopyCommitter extends FileOutputCommitter {
     this.taskAttemptContext = context;
   }
 
-  /** @inheritDoc */
+  /** {@inheritDoc} */
   @Override
   public void commitJob(JobContext jobContext) throws IOException {
     Configuration conf = jobContext.getConfiguration();
@@ -102,7 +102,7 @@ public class CopyCommitter extends FileOutputCommitter {
     }
   }
 
-  /** @inheritDoc */
+  /** {@inheritDoc} */
   @Override
   public void abortJob(JobContext jobContext,
                        JobStatus.State state) throws IOException {
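
inheritDoc is an inline javadoc tag, not a block tag: written bare as "@inheritDoc" it reads as an unknown block tag, whereas {@inheritDoc} splices the supertype's documentation into the comment at that point. A self-contained sketch (Base and Derived are illustrative names, not from this patch):

    class Base {
      /** Commits the job, making its output visible to readers. */
      void commitJob() { }
    }

    class Derived extends Base {
      /** {@inheritDoc} Also cleans up temporary files afterwards. */
      @Override
      void commitJob() { }
    }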

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
index ab57127..cca36df 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
@@ -45,7 +45,7 @@ import org.apache.hadoop.util.StringUtils;
 
 /**
  * Mapper class that executes the DistCp copy operation.
- * Implements the o.a.h.mapreduce.Mapper<> interface.
+ * Implements the o.a.h.mapreduce.Mapper interface.
  */
 public class CopyMapper extends Mapper<Text, CopyListingFileStatus, Text, Text> {
 
@@ -182,10 +182,11 @@ public class CopyMapper extends Mapper<Text, CopyListingFileStatus, Text, Text>
   }
 
   /**
-   * Implementation of the Mapper<>::map(). Does the copy.
+   * Implementation of the Mapper::map(). Does the copy.
    * @param relPath The target path.
    * @param sourceFileStatus The source path.
    * @throws IOException
+   * @throws InterruptedException
    */
   @Override
   public void map(Text relPath, CopyListingFileStatus sourceFileStatus,

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyOutputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyOutputFormat.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyOutputFormat.java
index eb43aa3..a5bd605 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyOutputFormat.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyOutputFormat.java
@@ -97,13 +97,13 @@ public class CopyOutputFormat<K, V> extends TextOutputFormat<K, V> {
     }
   }
 
-  /** @inheritDoc */
+  /** {@inheritDoc} */
   @Override
   public OutputCommitter getOutputCommitter(TaskAttemptContext context) throws IOException {
     return new CopyCommitter(getOutputPath(context), context);
   }
 
-  /** @inheritDoc */
+  /** {@inheritDoc} */
   @Override
   public void checkOutputSpecs(JobContext context) throws IOException {
     Configuration conf = context.getConfiguration();

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
index 1d61156..65d644b 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
@@ -84,8 +84,7 @@ public class RetriableFileCopyCommand extends RetriableCommand {
    * This is the actual copy-implementation.
    * @param arguments Argument-list to the command.
    * @return Number of bytes copied.
-   * @throws Exception: CopyReadException, if there are read-failures. All other
-   *         failures are IOExceptions.
+   * @throws Exception
    */
   @SuppressWarnings("unchecked")
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/UniformSizeInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/UniformSizeInputFormat.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/UniformSizeInputFormat.java
index 4add0bb..8dc7a65 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/UniformSizeInputFormat.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/UniformSizeInputFormat.java
@@ -38,7 +38,7 @@ import java.util.List;
 import java.util.ArrayList;
 
 /**
- * UniformSizeInputFormat extends the InputFormat<> class, to produce
+ * UniformSizeInputFormat extends the InputFormat class, to produce
  * input-splits for DistCp.
  * It looks at the copy-listing and groups the contents into input-splits such
  * that the total-number of bytes to be copied for each input split is
@@ -55,7 +55,7 @@ public class UniformSizeInputFormat
    * approximately equal.
    * @param context JobContext for the job.
    * @return The list of uniformly-distributed input-splits.
-   * @throws IOException: On failure.
+   * @throws IOException
    * @throws InterruptedException
    */
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputFormat.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputFormat.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputFormat.java
index f5303d5..38269c7 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputFormat.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicInputFormat.java
@@ -64,7 +64,7 @@ public class DynamicInputFormat<K, V> extends InputFormat<K, V> {
    * tasks.
    * @param jobContext JobContext for the map job.
    * @return The list of (empty) dynamic input-splits.
-   * @throws IOException, on failure.
+   * @throws IOException
    * @throws InterruptedException
    */
   @Override
@@ -343,7 +343,7 @@ public class DynamicInputFormat<K, V> extends InputFormat<K, V> {
    * @param inputSplit The split for which the RecordReader is required.
    * @param taskAttemptContext TaskAttemptContext for the current attempt.
    * @return DynamicRecordReader instance.
-   * @throws IOException, on failure.
+   * @throws IOException
    * @throws InterruptedException
    */
   @Override

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicRecordReader.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicRecordReader.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicRecordReader.java
index 40d75f4..00b3c69 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicRecordReader.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/lib/DynamicRecordReader.java
@@ -57,7 +57,7 @@ public class DynamicRecordReader<K, V> extends RecordReader<K, V> {
    * RecordReader to read from chunks.
    * @param inputSplit The InputSplit for the map. Ignored entirely.
    * @param taskAttemptContext The AttemptContext.
-   * @throws IOException, on failure.
+   * @throws IOException
    * @throws InterruptedException
    */
   @Override
@@ -88,7 +88,7 @@ public class DynamicRecordReader<K, V> extends RecordReader<K, V> {
   * been completely exhausted, a new chunk is acquired and read,
    * transparently.
    * @return True, if the nextValue() could be traversed to. False, otherwise.
-   * @throws IOException, on failure.
+   * @throws IOException
    * @throws InterruptedException
    */
   @Override
@@ -130,7 +130,7 @@ public class DynamicRecordReader<K, V> extends RecordReader<K, V> {
   /**
    * Implementation of RecordReader::getCurrentKey().
    * @return The key of the current record. (i.e. the source-path.)
-   * @throws IOException, on failure.
+   * @throws IOException
    * @throws InterruptedException
    */
   @Override
@@ -142,7 +142,7 @@ public class DynamicRecordReader<K, V> extends RecordReader<K, V> {
   /**
    * Implementation of RecordReader::getCurrentValue().
    * @return The value of the current record. (i.e. the target-path.)
-   * @throws IOException, on failure.
+   * @throws IOException
    * @throws InterruptedException
    */
   @Override
@@ -154,7 +154,7 @@ public class DynamicRecordReader<K, V> extends RecordReader<K, V> {
   /**
    * Implementation of RecordReader::getProgress().
    * @return A fraction [0.0,1.0] indicating the progress of a DistCp mapper.
-   * @throws IOException, on failure.
+   * @throws IOException
    * @throws InterruptedException
    */
   @Override
@@ -192,7 +192,7 @@ public class DynamicRecordReader<K, V> extends RecordReader<K, V> {
   /**
    * Implementation of RecordReader::close().
    * Closes the RecordReader.
-   * @throws IOException, on failure.
+   * @throws IOException
    */
   @Override
   public void close()

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
index ca7566b..20fdf11 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/DistCpUtils.java
@@ -65,7 +65,7 @@ public class DistCpUtils {
    * @param path The path of the file whose size is sought.
   * @param configuration Configuration, to retrieve the appropriate FileSystem.
    * @return The file-size, in number of bytes.
-   * @throws IOException, on failure.
+   * @throws IOException
    */
   public static long getFileSize(Path path, Configuration configuration)
                                             throws IOException {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/RetriableCommand.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/RetriableCommand.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/RetriableCommand.java
index 563372e..c27b2e1 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/RetriableCommand.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/RetriableCommand.java
@@ -77,7 +77,7 @@ public abstract class RetriableCommand {
   *  2. the command may no longer be retried (e.g. runs out of retry-attempts).
    * @param arguments The list of arguments for the command.
    * @return Generic "Object" from doExecute(), on success.
-   * @throws IOException, IOException, on complete failure.
+   * @throws Exception
    */
   public Object execute(Object... arguments) throws Exception {
     Exception latestException;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/ThrottledInputStream.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/ThrottledInputStream.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/ThrottledInputStream.java
index d08a301..9e435d9 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/ThrottledInputStream.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/util/ThrottledInputStream.java
@@ -62,7 +62,7 @@ public class ThrottledInputStream extends InputStream {
     rawStream.close();
   }
 
-  /** @inheritDoc */
+  /** {@inheritDoc} */
   @Override
   public int read() throws IOException {
     throttle();
@@ -73,7 +73,7 @@ public class ThrottledInputStream extends InputStream {
     return data;
   }
 
-  /** @inheritDoc */
+  /** {@inheritDoc} */
   @Override
   public int read(byte[] b) throws IOException {
     throttle();
@@ -84,7 +84,7 @@ public class ThrottledInputStream extends InputStream {
     return readLen;
   }
 
-  /** @inheritDoc */
+  /** {@inheritDoc} */
   @Override
   public int read(byte[] b, int off, int len) throws IOException {
     throttle();
@@ -155,7 +155,7 @@ public class ThrottledInputStream extends InputStream {
     return totalSleepTime;
   }
 
-  /** @inheritDoc */
+  /** {@inheritDoc} */
   @Override
   public String toString() {
     return "ThrottledInputStream{" +

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java
index 050bfbe..449ecbf 100644
--- a/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java
+++ b/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java
@@ -60,7 +60,9 @@ import org.apache.hadoop.mapreduce.lib.map.RegexMapper;
  *  b) Directory on dfs to archive the logs. 
  *  c) The sort/grep patterns for analyzing the files and separator for boundaries.
  * Usage: 
- * Logalyzer -archive -archiveDir <directory to archive logs> -analysis <directory> -logs <log-list uri> -grep <pattern> -sort <col1, col2> -separator <separator>
+ * Logalyzer -archive -archiveDir &lt;directory to archive logs&gt; -analysis
+ * &lt;directory&gt; -logs &lt;log-list uri&gt; -grep &lt;pattern&gt; -sort
+ * &lt;col1, col2&gt; -separator &lt;separator&gt;
  * <p>
  */
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageEmulatorPlugin.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageEmulatorPlugin.java b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageEmulatorPlugin.java
index 593c1a4..7a80e8d 100644
--- a/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageEmulatorPlugin.java
+++ b/hadoop-tools/hadoop-gridmix/src/main/java/org/apache/hadoop/mapred/gridmix/emulators/resourceusage/ResourceUsageEmulatorPlugin.java
@@ -35,7 +35,7 @@ import org.apache.hadoop.conf.Configuration;
  * {@link ResourceUsageEmulatorPlugin} is also configured with a feedback module
  * i.e a {@link ResourceCalculatorPlugin}, to monitor the current resource
  * usage. {@link ResourceUsageMetrics} decides the final resource usage value to
- * emulate. {@link Progressive} keeps track of the task's progress.</p>
+ * emulate. {@link Progressive} keeps track of the task's progress.
  * 
  * <br><br>
  * 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/RestClientBindings.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/RestClientBindings.java b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/RestClientBindings.java
index 25a7e93..d11c369 100644
--- a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/RestClientBindings.java
+++ b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/RestClientBindings.java
@@ -31,10 +31,10 @@ import static org.apache.hadoop.fs.swift.http.SwiftProtocolConstants.*;
 /**
  * This class implements the binding logic between Hadoop configurations
  * and the swift rest client.
- * <p/>
+ * <p>
  * The swift rest client takes a Properties instance containing
  * the string values it uses to bind to a swift endpoint.
- * <p/>
+ * <p>
  * This class extracts the values for a specific filesystem endpoint
  * and then builds an appropriate Properties file.
  */
@@ -188,7 +188,7 @@ public final class RestClientBindings {
 
   /**
   * Copy a (trimmed) property from the configuration file to the properties file.
-   * <p/>
+   * <p>
    * If marked as required and not found in the configuration, an
    * exception is raised.
    * If not required -and missing- then the property will not be set.
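
The <p/> cleanups here are another recurring doclint complaint: javadoc's HTML checker rejects the self-closing form of non-void elements. In doc comments, <p> stands alone as a paragraph separator, with no closing tag expected:

    /**
     * This class implements the binding logic between Hadoop configurations
     * and the swift rest client.
     * <p>
     * The swift rest client takes a Properties instance containing
     * the string values it uses to bind to a swift endpoint.
     */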

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/SwiftRestClient.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/SwiftRestClient.java b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/SwiftRestClient.java
index 28f8b47..55dad11 100644
--- a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/SwiftRestClient.java
+++ b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/http/SwiftRestClient.java
@@ -1061,10 +1061,9 @@ public final class SwiftRestClient {
    * Authenticate to Openstack Keystone
    * As well as returning the access token, the member fields {@link #token},
    * {@link #endpointURI} and {@link #objectLocationURI} are set up for re-use.
-   * <p/>
+   * <p>
   * This method is re-entrant -if more than one thread attempts to authenticate
   * neither will block -but the field values will have those of the last caller.
-   * <p/>
    *
    * @return authenticated access token
    */
@@ -1575,6 +1574,7 @@ public final class SwiftRestClient {
    * @param path path to object
   * @param endpointURI domain url e.g. http://domain.com
    * @return valid URI for object
+   * @throws SwiftException
    */
   public static URI pathToURI(SwiftObjectPath path,
                               URI endpointURI) throws SwiftException {
@@ -1820,7 +1820,7 @@ public final class SwiftRestClient {
 
   /**
    * Get the blocksize of this filesystem
-   * @return a blocksize >0
+   * @return a blocksize &gt; 0
    */
   public long getBlocksizeKB() {
     return blocksizeKB;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystem.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystem.java b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystem.java
index b70f7ef..27a572f 100644
--- a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystem.java
+++ b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystem.java
@@ -225,10 +225,10 @@ public class SwiftNativeFileSystem extends FileSystem {
    * Return an array containing hostnames, offset and size of
    * portions of the given file.  For a nonexistent
    * file or regions, null will be returned.
-   * <p/>
+   * <p>
    * This call is most helpful with DFS, where it returns
    * hostnames of machines that contain the given file.
-   * <p/>
+   * <p>
    * The FileSystem will simply return an elt containing 'localhost'.
    */
   @Override
@@ -645,7 +645,7 @@ public class SwiftNativeFileSystem extends FileSystem {
   /**
    * Low level method to do a deep listing of all entries, not stopping
    * at the next directory entry. This is to let tests be confident that
-   * recursive deletes &c really are working.
+   * recursive deletes really are working.
    * @param path path to recurse down
    * @param newest ask for the newest data, potentially slower than not.
    * @return a potentially empty array of file status

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java
index 0138eae..6d812a0 100644
--- a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java
+++ b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/snative/SwiftNativeFileSystemStore.java
@@ -518,7 +518,7 @@ public class SwiftNativeFileSystemStore {
    * Rename through copy-and-delete. this is a consequence of the
    * Swift filesystem using the path as the hash
    * into the Distributed Hash Table, "the ring" of filenames.
-   * <p/>
+   * <p>
    * Because of the nature of the operation, it is not atomic.
    *
    * @param src source file/dir
@@ -847,7 +847,7 @@ public class SwiftNativeFileSystemStore {
   }
 
   /**
-   * Insert a throttled wait if the throttle delay >0
+   * Insert a throttled wait if the throttle delay &gt; 0
    * @throws InterruptedIOException if interrupted during sleep
    */
   public void throttle() throws InterruptedIOException {
@@ -878,7 +878,7 @@ public class SwiftNativeFileSystemStore {
    * raised. This lets the caller distinguish a file not found with
    * other reasons for failure, so handles race conditions in recursive
    * directory deletes better.
-   * <p/>
+   * <p>
    * The problem being addressed is: caller A requests a recursive directory
    * of directory /dir ; caller B requests a delete of a file /dir/file,
    * between caller A enumerating the files contents, and requesting a delete

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/SwiftTestUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/SwiftTestUtils.java b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/SwiftTestUtils.java
index c9e26ac..01ec739 100644
--- a/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/SwiftTestUtils.java
+++ b/hadoop-tools/hadoop-openstack/src/main/java/org/apache/hadoop/fs/swift/util/SwiftTestUtils.java
@@ -236,7 +236,7 @@ public class SwiftTestUtils extends org.junit.Assert {
 
   /**
    * Convert a byte to a character for printing. If the
-   * byte value is < 32 -and hence unprintable- the byte is
+   * byte value is &lt; 32 -and hence unprintable- the byte is
    * returned as a two digit hex value
    * @param b byte
    * @return the printable character string

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/InputDemuxer.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/InputDemuxer.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/InputDemuxer.java
index cd99e1c..0927a77 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/InputDemuxer.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/InputDemuxer.java
@@ -45,12 +45,12 @@ public interface InputDemuxer extends Closeable {
   public void bindTo(Path path, Configuration conf) throws IOException;
 
   /**
-   * Get the next <name, input> pair. The name should preserve the original job
+   * Get the next &lt;name, input&gt; pair. The name should preserve the original job
    * history file or job conf file name. The input object should be closed
    * before calling getNext() again. The old input object would be invalid after
    * calling getNext() again.
    * 
-   * @return the next <name, input> pair.
+   * @return the next &lt;name, input&gt; pair.
    */
   public Pair<String, InputStream> getNext() throws IOException;
 }
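
An alternative to entity escaping, had the author wanted it, is the {@literal} inline tag, which displays its argument verbatim without interpreting HTML or nested tags (though without code font):

    /**
     * @return the next {@literal <name, input>} pair.
     */
    public Pair<String, InputStream> getNext() throws IOException;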

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java
index c2537be..7547eca 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java
@@ -67,8 +67,9 @@ import org.apache.log4j.Logger;
  * ignoring user-specific and hard-to-parse keys but also provides a consistent
  * view for all possible inputs. So if users invoke the 
  * {@link #parseJobProperty(String, String)} API with either
- * <"mapreduce.job.user.name", "bob"> or <"user.name", "bob">, then the result 
- * would be a {@link UserName} {@link DataType} wrapping the user-name "bob".
+ * &lt;"mapreduce.job.user.name", "bob"&gt; or &lt;"user.name", "bob"&gt;,
+ * then the result would be a {@link UserName} {@link DataType} wrapping
+ * the user-name "bob".
  */
 @SuppressWarnings("deprecation")
 public class MapReduceJobPropertiesParser implements JobPropertyParser {

http://git-wip-us.apache.org/repos/asf/hadoop/blob/9ae7f9eb/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/package-info.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/package-info.java
index b88b37e..2253225 100644
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/package-info.java
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/package-info.java
@@ -41,7 +41,7 @@
  *        String conf_filename = .. // assume the job configuration filename here
  *        
  *        // construct a list of interesting properties
- *        List<String> interestedProperties = new ArrayList<String>();
+ *        List&lt;String&gt; interestedProperties = new ArrayList&lt;String&gt;();
  *        interestedProperties.add("mapreduce.job.name");
  *        
  *        JobConfigurationParser jcp = 
@@ -154,7 +154,7 @@
  *        TopologyBuilder tb = new TopologyBuilder();
  *        
  *        // construct a list of interesting properties
- *        List<String> interestingProperties = new ArrayList<Strng>();
+ *        List&lt;String&gt; interestingProperties = new ArrayList&lt;String&gt;();
  *        // add the interesting properties here
  *        interestingProperties.add("mapreduce.job.name");
  *        
@@ -207,7 +207,7 @@
  *        JobBuilder jb = new JobBuilder(jobID);
  *        
  *        // construct a list of interesting properties
- *        List<String> interestingProperties = new ArrayList<Strng>();
+ *        List&lt;String&gt; interestingProperties = new ArrayList&lt;String&gt;();
  *        // add the interesting properties here
  *        interestingProperties.add("mapreduce.job.name");
  *        
@@ -269,7 +269,7 @@
  *        TopologyBuilder tb = new TopologyBuilder();
  *        
  *        // construct a list of interesting properties
- *        List<String> interestingProperties = new ArrayList<Strng>();
+ *        List&lt;String&gt; interestingProperties = new ArrayList&lt;String&gt;();
  *        // add the interesting properties here
  *        interestingProperties.add("mapreduce.job.name");
  *        
