[hadoop] branch trunk updated: HDDS-1499. OzoneManager Cache. (#798)

2019-05-19 Thread arp
This is an automated email from the ASF dual-hosted git repository.

arp pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new 0d1d7c8  HDDS-1499. OzoneManager Cache. (#798)
0d1d7c8 is described below

commit 0d1d7c86ec34fabc62c0e3844aca3733024bc172
Author: Bharat Viswanadham 
AuthorDate: Sun May 19 19:23:02 2019 -0700

HDDS-1499. OzoneManager Cache. (#798)
---
 .../java/org/apache/hadoop/utils/db/DBStore.java   |   1 +
 .../java/org/apache/hadoop/utils/db/RDBTable.java  |  10 +-
 .../java/org/apache/hadoop/utils/db/Table.java |  26 +++-
 .../org/apache/hadoop/utils/db/TypedTable.java |  78 ++-
 .../org/apache/hadoop/utils/db/cache/CacheKey.java |  56 
 .../apache/hadoop/utils/db/cache/CacheValue.java   |  47 +++
 .../apache/hadoop/utils/db/cache/EpochEntry.java   |  74 +++
 .../hadoop/utils/db/cache/PartialTableCache.java   |  97 ++
 .../apache/hadoop/utils/db/cache/TableCache.java   |  63 +
 .../apache/hadoop/utils/db/cache/package-info.java |  18 +++
 .../hadoop/utils/db/TestTypedRDBTableStore.java|  82 +++-
 .../utils/db/cache/TestPartialTableCache.java  | 142 +
 .../apache/hadoop/utils/db/cache/package-info.java |  22 
 .../hadoop/ozone/om/OmMetadataManagerImpl.java |   4 +-
 14 files changed, 709 insertions(+), 11 deletions(-)

diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/DBStore.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/DBStore.java
index 56166ab..9e0c4a4 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/DBStore.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/DBStore.java
@@ -44,6 +44,7 @@ public interface DBStore extends AutoCloseable {
*/
   Table getTable(String name) throws IOException;
 
+
   /**
* Gets an existing TableStore with implicit key/value conversion.
*
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/RDBTable.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/RDBTable.java
index 88b0411..7bbe9d9 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/RDBTable.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/RDBTable.java
@@ -22,6 +22,7 @@ package org.apache.hadoop.utils.db;
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hdfs.DFSUtil;
 
 import org.rocksdb.ColumnFamilyHandle;
@@ -33,9 +34,12 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
- * RocksDB implementation of ozone metadata store.
+ * RocksDB implementation of ozone metadata store. This class should only be
+ * used as part of TypedTable, as its underlying implementation for accessing
+ * the metadata store content. All other users of Table should use TypedTable.
  */
-public class RDBTable implements Table {
+@InterfaceAudience.Private
+class RDBTable implements Table {
 
 
   private static final Logger LOG =
@@ -52,7 +56,7 @@ public class RDBTable implements Table {
* @param handle - ColumnFamily Handle.
* @param writeOptions - RocksDB write Options.
*/
-  public RDBTable(RocksDB db, ColumnFamilyHandle handle,
+  RDBTable(RocksDB db, ColumnFamilyHandle handle,
   WriteOptions writeOptions) {
 this.db = db;
 this.handle = handle;
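
The new javadoc above makes RDBTable a package-private detail that callers reach only through TypedTable. As a rough, hypothetical illustration of that layering (the names Codec, RawTable and TypedTableSketch below are invented for this sketch and are not the committed TypedTable API), a typed wrapper converts keys and values to byte[] and delegates to the raw table:

// Minimal sketch, assuming a codec-based typed wrapper over a raw byte[]
// table such as RDBTable; callers use the typed view and never touch the
// byte-level API directly.
import java.io.IOException;

interface Codec<T> {
  byte[] toBytes(T value) throws IOException;
  T fromBytes(byte[] raw) throws IOException;
}

interface RawTable {                 // stand-in for a raw byte[]/byte[] table
  void put(byte[] key, byte[] value) throws IOException;
  byte[] get(byte[] key) throws IOException;
}

final class TypedTableSketch<K, V> {
  private final RawTable rawTable;
  private final Codec<K> keyCodec;
  private final Codec<V> valueCodec;

  TypedTableSketch(RawTable rawTable, Codec<K> keyCodec, Codec<V> valueCodec) {
    this.rawTable = rawTable;
    this.keyCodec = keyCodec;
    this.valueCodec = valueCodec;
  }

  void put(K key, V value) throws IOException {
    // Convert once at the boundary, then delegate to the raw table.
    rawTable.put(keyCodec.toBytes(key), valueCodec.toBytes(value));
  }

  V get(K key) throws IOException {
    byte[] raw = rawTable.get(keyCodec.toBytes(key));
    return raw == null ? null : valueCodec.fromBytes(raw);
  }
}
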
diff --git a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/Table.java b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/Table.java
index 2f14e77..905a68b 100644
--- a/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/Table.java
+++ b/hadoop-hdds/common/src/main/java/org/apache/hadoop/utils/db/Table.java
@@ -21,8 +21,10 @@ package org.apache.hadoop.utils.db;
 
 import java.io.IOException;
 
+import org.apache.commons.lang3.NotImplementedException;
 import org.apache.hadoop.classification.InterfaceStability;
-
+import org.apache.hadoop.utils.db.cache.CacheKey;
+import org.apache.hadoop.utils.db.cache.CacheValue;
 /**
  * Interface for key-value store that stores ozone metadata. Ozone metadata is
 * stored as key value pairs, both key and value are arbitrary byte arrays. Each
@@ -98,6 +100,28 @@ public interface Table extends AutoCloseable {
   String getName() throws IOException;
 
   /**
+   * Adds an entry to the table cache.
+   *
+   * If the cacheKey already exists, the existing entry is overwritten.
+   * @param cacheKey key of the cache entry
+   * @param cacheValue value of the cache entry
+   */
+  default void addCacheEntry(CacheKey cacheKey,
+      CacheValue cacheValue) {
+    throw new NotImplementedException("addCacheEntry is not implemented");
+  }
+
+  /**
+   * Removes all entries from the table cache whose epoch value is less than
+   * or equal to the specified epoch.
+   * @param epoch epoch up to and including which entries are removed
+   */
+  default void 
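
For orientation, the cache contract added by the default methods above (add an entry, then clean up everything at or below a flushed epoch) can be pictured with a small stand-alone example. The class below is a hypothetical sketch, not the committed CacheKey/CacheValue/PartialTableCache code; names and fields are illustrative only.

// Minimal sketch, assuming an epoch-based partial cache: each entry records
// the epoch at which it was added, and cleanup(epoch) evicts every entry
// whose epoch is less than or equal to the given value, i.e. entries that
// are assumed to have been flushed to the underlying RocksDB table.
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class EpochCacheSketch<K, V> {

  private static final class Entry<V> {
    final V value;
    final long epoch;
    Entry(V value, long epoch) {
      this.value = value;
      this.epoch = epoch;
    }
  }

  private final Map<K, Entry<V>> cache = new ConcurrentHashMap<>();

  // Overwrites any existing entry for the key, like addCacheEntry above.
  public void put(K key, V value, long epoch) {
    cache.put(key, new Entry<>(value, epoch));
  }

  public V get(K key) {
    Entry<V> entry = cache.get(key);
    return entry == null ? null : entry.value;
  }

  // Drops entries whose epoch is <= the given epoch.
  public void cleanup(long epoch) {
    cache.entrySet().removeIf(e -> e.getValue().epoch <= epoch);
  }
}

In a table with such a cache, a read would typically consult the cache first and only fall through to RocksDB on a miss.
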

[hadoop] branch trunk updated: SUBMARINE-58. Submarine client needs to generate fat jar. Contributed by Zac Zhou.

2019-05-19 Thread ztang
This is an automated email from the ASF dual-hosted git repository.

ztang pushed a commit to branch trunk
in repository https://gitbox.apache.org/repos/asf/hadoop.git


The following commit(s) were added to refs/heads/trunk by this push:
 new 729ccb2  SUBMARINE-58. Submarine client needs to generate fat jar. Contributed by Zac Zhou.
729ccb2 is described below

commit 729ccb2cabde67ee68600438b334a4f65d947092
Author: Zhankun Tang 
AuthorDate: Sun May 19 21:18:33 2019 +0800

SUBMARINE-58. Submarine client needs to generate fat jar. Contributed by Zac Zhou.
---
 hadoop-submarine/hadoop-submarine-all/pom.xml  | 183 
 hadoop-submarine/hadoop-submarine-core/pom.xml |   4 +
 .../client/cli/param/runjob/RunJobParameters.java  |   5 +-
 .../param/runjob/TensorFlowRunJobParameters.java   |  10 +-
 .../submarine/common/resource/ResourceUtils.java   | 332 +
 .../common/resource/UnitsConversionUtil.java   | 164 ++
 .../submarine/common/resource/package-info.java|  19 ++
 .../src/site/markdown/QuickStart.md|  63 +++-
 .../cli/runjob/TestRunJobCliParsingCommonYaml.java |  21 +-
 .../pytorch/TestRunJobCliParsingPyTorchYaml.java   |  82 +++--
 .../TestRunJobCliParsingTensorFlowYaml.java|  85 --
 ...stRunJobCliParsingTensorFlowYamlStandalone.java |   6 +-
 .../yarn/submarine/common/MockClientContext.java   |  23 --
 .../runjob-common-yaml/empty-framework.yaml|   6 +-
 .../runjob-common-yaml/missing-configs.yaml|   6 +-
 .../runjob-common-yaml/missing-framework.yaml  |   6 +-
 .../runjob-common-yaml/some-sections-missing.yaml  |   4 +-
 .../runjob-common-yaml/test-false-values.yaml  |   4 +-
 .../runjob-common-yaml/wrong-indentation.yaml  |   6 +-
 .../runjob-common-yaml/wrong-property-name.yaml|   6 +-
 .../runjob-pytorch-yaml/envs-are-missing.yaml  |   2 +-
 .../invalid-config-ps-section.yaml |   2 +-
 .../invalid-config-tensorboard-section.yaml|   2 +-
 .../security-principal-is-missing.yaml |   2 +-
 .../valid-config-with-overrides.yaml   |   2 +-
 .../runjob-pytorch-yaml/valid-config.yaml  |   2 +-
 .../{valid-config.yaml => valid-gpu-config.yaml}   |   0
 .../runjob-tensorflow-yaml/envs-are-missing.yaml   |   6 +-
 .../security-principal-is-missing.yaml |   6 +-
 .../tensorboard-dockerimage-is-missing.yaml|   6 +-
 .../valid-config-with-overrides.yaml   |   6 +-
 .../runjob-tensorflow-yaml/valid-config.yaml   |   6 +-
 ...image-is-missing.yaml => valid-gpu-config.yaml} |   5 +-
 hadoop-submarine/hadoop-submarine-dist/pom.xml | 131 
 .../src/assembly/distribution.xml  |  61 
 .../hadoop-submarine-tony-runtime/pom.xml  |   1 +
 .../yarn/submarine/runtimes/tony/TonyUtils.java|  21 +-
 .../hadoop-submarine-yarnservice-runtime/pom.xml   |   4 +-
 .../runtimes/yarnservice/AbstractComponent.java|   6 +-
 .../component/TestTensorFlowWorkerComponent.java   |   6 +-
 .../utils/TestSubmarineResourceUtils.java  |  50 +++-
 hadoop-submarine/pom.xml   | 133 +++--
 42 files changed, 1310 insertions(+), 185 deletions(-)
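
Among the files listed above, ResourceUtils.java and UnitsConversionUtil.java add resource and unit helpers to the Submarine client. As a loose, hypothetical illustration only (this is not the actual ResourceUtils/UnitsConversionUtil API), normalizing a memory value such as 2G to MiB before building a resource request usually looks like:

// Hypothetical helper, assuming binary units (G/Gi = 1024 Mi); not the
// Submarine UnitsConversionUtil, just an illustration of unit normalization.
public class MemoryUnitSketch {

  static long toMiB(long value, String unit) {
    switch (unit) {
      case "M":
      case "Mi":
        return value;
      case "G":
      case "Gi":
        return value * 1024L;
      case "T":
      case "Ti":
        return value * 1024L * 1024L;
      default:
        throw new IllegalArgumentException("Unknown memory unit: " + unit);
    }
  }

  public static void main(String[] args) {
    System.out.println(toMiB(2, "G")); // 2G -> 2048 MiB
  }
}
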

diff --git a/hadoop-submarine/hadoop-submarine-all/pom.xml b/hadoop-submarine/hadoop-submarine-all/pom.xml
new file mode 100644
index 0000000..e2d2e17
--- /dev/null
+++ b/hadoop-submarine/hadoop-submarine-all/pom.xml
@@ -0,0 +1,183 @@
+
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+  xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
+  http://maven.apache.org/xsd/maven-4.0.0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <parent>
+    <artifactId>hadoop-submarine</artifactId>
+    <groupId>org.apache.hadoop</groupId>
+    <version>0.2.0-SNAPSHOT</version>
+  </parent>
+  <artifactId>${project.artifactId}</artifactId>
+  <version>${project.version}</version>
+  <name>Hadoop Submarine All</name>
+
+  <properties>
+
+${project.parent.parent.basedir}
+    <project.artifactId>hadoop-submarine-all</project.artifactId>
+    <project.version>0.2.0-SNAPSHOT</project.version>
+  </properties>
+
+  <dependencies>
+
+
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-hdfs</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-common</artifactId>
+    </dependency>
+
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-submarine-core</artifactId>
+      <version>${project.version}</version>
+    </dependency>
+  </dependencies>
+
+  <profiles>
+
+    <profile>
+      <id>hadoop-3.2</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-submarine-yarnservice-runtime</artifactId>
+          <version>${project.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-submarine-tony-runtime</artifactId>
+          <version>${project.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs-client</artifactId>
+          <version>${hadoop.version}</version>
+        </dependency>
+      </dependencies>
+    </profile>
+
+
+    <profile>
+      <id>hadoop-3.1</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-submarine-yarnservice-runtime</artifactId>
+          <version>${project.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-submarine-tony-runtime</artifactId>
+          <version>${project.version}</version>
+        </dependency>
+        <dependency>
+          <groupId>org.apache.hadoop</groupId>
+          <artifactId>hadoop-hdfs-client</artifactId>
+        </dependency>