Repository: hadoop
Updated Branches:
  refs/heads/trunk 4d4d95fdd -> b8a446ba5


HADOOP-13419. Fix javadoc warnings by JDK8 in hadoop-common package. Contributed by Kai Sasaki.


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/b8a446ba
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/b8a446ba
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/b8a446ba

Branch: refs/heads/trunk
Commit: b8a446ba57d89c0896ec2d56dd919b0101e69f44
Parents: 4d4d95f
Author: Masatake Iwasaki <iwasak...@apache.org>
Authored: Tue Aug 16 13:30:40 2016 +0900
Committer: Masatake Iwasaki <iwasak...@apache.org>
Committed: Tue Aug 16 13:30:40 2016 +0900

----------------------------------------------------------------------
 .../java/org/apache/hadoop/fs/FileContext.java  |  4 +-
 .../apache/hadoop/io/retry/package-info.java    | 22 +++++++++
 .../org/apache/hadoop/io/retry/package.html     | 48 --------------------
 .../org/apache/hadoop/ipc/package-info.java     |  4 ++
 .../java/org/apache/hadoop/ipc/package.html     | 23 ----------
 5 files changed, 28 insertions(+), 73 deletions(-)
----------------------------------------------------------------------
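
The fix removes the legacy package.html files and moves their content into the
package-level javadoc of package-info.java, which is the form JDK8's javadoc tool
validates. A minimal sketch of the resulting file layout follows; the package name and
description are illustrative, not part of this commit:

    /**
     * Illustrative package description, moved here from a legacy package.html.
     */
    @InterfaceAudience.Private
    @InterfaceStability.Evolving
    package org.example.hypothetical;

    import org.apache.hadoop.classification.InterfaceAudience;
    import org.apache.hadoop.classification.InterfaceStability;

Note that in a package-info.java the javadoc comment and annotations precede the
package statement, and the imports used by those annotations follow it.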


http://git-wip-us.apache.org/repos/asf/hadoop/blob/b8a446ba/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index e6a4cf4..f235773 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -303,7 +303,7 @@ public class FileContext {
    * 
    * @throws UnsupportedFileSystemException If the file system for
    *           <code>absOrFqPath</code> is not supported.
-   * @throws IOExcepton If the file system for <code>absOrFqPath</code> could
+   * @throws IOException If the file system for <code>absOrFqPath</code> could
    *         not be instantiated.
    */
   protected AbstractFileSystem getFSofPath(final Path absOrFqPath)
@@ -2713,7 +2713,7 @@ public class FileContext {
   /**
    * Query the effective storage policy ID for the given file or directory.
    *
-   * @param src file or directory path.
+   * @param path file or directory path.
   * @return storage policy for given file.
    * @throws IOException
    */
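
Both hunks address tags that JDK8's stricter doclint rejects: @throws must reference a
resolvable exception class (IOExcepton is a typo for IOException), and @param must name
an actual parameter of the method (the parameter is path, not src). A minimal sketch of
a javadoc block that satisfies both rules; the interface and signature are hypothetical,
not copied from FileContext:

    import java.io.IOException;
    import org.apache.hadoop.fs.BlockStoragePolicySpi;
    import org.apache.hadoop.fs.Path;

    /** Hypothetical interface used only to illustrate doclint-clean javadoc tags. */
    interface StoragePolicyQuery {
      /**
       * Query the effective storage policy for the given file or directory.
       *
       * @param path file or directory path.
       * @return the storage policy of the given path.
       * @throws IOException if the policy cannot be determined.
       */
      BlockStoragePolicySpi getStoragePolicy(Path path) throws IOException;
    }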

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b8a446ba/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/package-info.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/package-info.java
index 693065f..089cf6f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/package-info.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/package-info.java
@@ -15,6 +15,28 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+/**
+ * A mechanism for selectively retrying methods that throw exceptions under
+ * certain circumstances.
+ * Typical usage is
+ *  UnreliableImplementation unreliableImpl = new UnreliableImplementation();
+ *  UnreliableInterface unreliable = (UnreliableInterface)
+ *  RetryProxy.create(UnreliableInterface.class, unreliableImpl,
+ *  RetryPolicies.retryUpToMaximumCountWithFixedSleep(4, 10,
+ *      TimeUnit.SECONDS));
+ *  unreliable.call();
+ *
+ * This will retry any method called on <code>unreliable</code> four times -
+ * in this case the <code>call()</code> method - sleeping 10 seconds between
+ * each retry. There are a number of
+ * {@link org.apache.hadoop.io.retry.RetryPolicies retry policies}
+ * available, or you can implement a custom one by implementing
+ * {@link org.apache.hadoop.io.retry.RetryPolicy}.
+ * It is also possible to specify retry policies on a
+ * {@link org.apache.hadoop.io.retry.RetryProxy#create(Class, Object, Map)
+ * per-method basis}.
+ */
 @InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 package org.apache.hadoop.io.retry;
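
The new package-level javadoc documents the RetryProxy usage pattern. A self-contained
sketch of that pattern follows; MyService and MyServiceImpl are hypothetical stand-ins
for the UnreliableInterface/UnreliableImplementation types named in the javadoc (those
are test fixtures, not public API):

    import java.io.IOException;
    import java.util.concurrent.TimeUnit;

    import org.apache.hadoop.io.retry.RetryPolicies;
    import org.apache.hadoop.io.retry.RetryPolicy;
    import org.apache.hadoop.io.retry.RetryProxy;

    public class RetryProxyExample {

      /** Hypothetical service interface whose methods may fail transiently. */
      interface MyService {
        void call() throws IOException;
      }

      /** Hypothetical implementation that may throw IOException. */
      static class MyServiceImpl implements MyService {
        @Override
        public void call() throws IOException {
          // real work that may fail transiently goes here
        }
      }

      public static void main(String[] args) throws IOException {
        // Retry each failed call up to four times, sleeping 10 seconds between attempts.
        RetryPolicy policy =
            RetryPolicies.retryUpToMaximumCountWithFixedSleep(4, 10, TimeUnit.SECONDS);

        // RetryProxy.create returns a dynamic proxy implementing MyService.
        MyService service =
            (MyService) RetryProxy.create(MyService.class, new MyServiceImpl(), policy);

        // Calls made through the proxy are retried according to the policy.
        service.call();
      }
    }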

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b8a446ba/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/package.html
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/package.html b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/package.html
deleted file mode 100644
index ae553fc..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/package.html
+++ /dev/null
@@ -1,48 +0,0 @@
-<html>
-
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<body>
-
-<p>
-A mechanism for selectively retrying methods that throw exceptions under certain circumstances.
-</p>
-
-<p>
-Typical usage is
-</p>
-
-<pre>
-UnreliableImplementation unreliableImpl = new UnreliableImplementation();
-UnreliableInterface unreliable = (UnreliableInterface)
-  RetryProxy.create(UnreliableInterface.class, unreliableImpl,
-    RetryPolicies.retryUpToMaximumCountWithFixedSleep(4, 10, TimeUnit.SECONDS));
-unreliable.call();
-</pre>
-
-<p>
-This will retry any method called on <code>unreliable</code> four times - in this case the <code>call()</code>
-method - sleeping 10 seconds between
-each retry. There are a number of {@link org.apache.hadoop.io.retry.RetryPolicies retry policies}
-available, or you can implement a custom one by implementing {@link org.apache.hadoop.io.retry.RetryPolicy}.
-It is also possible to specify retry policies on a 
-{@link org.apache.hadoop.io.retry.RetryProxy#create(Class, Object, Map) per-method basis}.
-</p>
-
-</body>
-</html>

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b8a446ba/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/package-info.java
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/package-info.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/package-info.java
index 525ef1f..cb35e93 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/package-info.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/package-info.java
@@ -15,6 +15,10 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
+/**
+ * Tools to help define network clients and servers.
+ */
 @InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 package org.apache.hadoop.ipc;

http://git-wip-us.apache.org/repos/asf/hadoop/blob/b8a446ba/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/package.html
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/package.html b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/package.html
deleted file mode 100644
index 3efd81a..0000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/package.html
+++ /dev/null
@@ -1,23 +0,0 @@
-<html>
-
-<!--
-   Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
-   this work for additional information regarding copyright ownership.
-   The ASF licenses this file to You under the Apache License, Version 2.0
-   (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
--->
-
-<body>
-Tools to help define network clients and servers.
-</body>
-</html>
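
As a typical way to confirm the warnings are gone (not part of this commit), the module
javadoc can be regenerated with the maven-javadoc-plugin, e.g. by running
mvn javadoc:javadoc under hadoop-common-project/hadoop-common, and the output checked
for remaining doclint errors.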

