HIVE-19725: Add ability to dump non-native tables in replication metadata dump (Mahesh Kumar Behera, reviewed by Sankar Hariappan)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/6a16a71c
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/6a16a71c
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/6a16a71c

Branch: refs/heads/master-txnstats
Commit: 6a16a71ce99ff5d2f7bfa69cfcb475d4adc9873f
Parents: 4ec256c
Author: Sankar Hariappan <sank...@apache.org>
Authored: Mon Jun 18 06:23:41 2018 -0700
Committer: Sankar Hariappan <sank...@apache.org>
Committed: Mon Jun 18 06:23:41 2018 -0700

----------------------------------------------------------------------
 .../hadoop/hive/ql/parse/TestExportImport.java  | 44 +++++++++++++++++++-
 ...TestReplicationScenariosAcrossInstances.java | 29 ++++++++++++-
 .../hadoop/hive/ql/parse/repl/dump/Utils.java   |  3 +-
 3 files changed, 73 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/6a16a71c/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java
index 67b74c2..53d13d8 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestExportImport.java
@@ -30,9 +30,12 @@ import org.junit.Test;
 import org.junit.rules.TestName;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import java.io.IOException;
 import java.util.HashMap;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
 
 public class TestExportImport {
 
@@ -122,4 +125,43 @@ public class TestExportImport {
         .verifyResults(new String[] { "1", "2" });
   }
 
+  @Test
+  public void testExportNonNativeTable() throws Throwable {
+    String path = "hdfs:///tmp/" + dbName + "/";
+    String exportPath = path + "1/";
+    String exportMetaPath = exportPath + "/Meta";
+    String tableName = testName.getMethodName();
+    String createTableQuery =
+        "CREATE TABLE " + tableName + " ( serde_id bigint COMMENT 'from deserializer', name string "
+            + "COMMENT 'from deserializer', slib string COMMENT 'from deserializer') "
+            + "ROW FORMAT SERDE 'org.apache.hive.storage.jdbc.JdbcSerDe' "
+            + "STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler' "
+            + "WITH SERDEPROPERTIES ('serialization.format'='1') "
+            + "TBLPROPERTIES ( "
+            + "'hive.sql.database.type'='METASTORE', "
+            + "'hive.sql.query'='SELECT \"SERDE_ID\", \"NAME\", \"SLIB\" FROM \"SERDES\"')";
+
+    srcHiveWarehouse.run("use " + dbName)
+        .run(createTableQuery)
+        .runFailure("export table " + tableName + " to '" + exportPath + "'")
+        .run("export table " + tableName + " to '" + exportMetaPath + "'" + " for metadata replication('1')");
+
+    destHiveWarehouse.run("use " + replDbName)
+        .runFailure("import table " + tableName + " from '" + exportPath + "'")
+        .run("show tables")
+        .verifyFailure(new String[] {tableName})
+        .run("import table " + tableName + " from '" + exportMetaPath + "'")
+        .run("show tables")
+        .verifyResult(tableName);
+
+    // check physical path
+    Path checkPath = new Path(exportPath);
+    checkPath = new Path(checkPath, EximUtil.DATA_PATH_NAME);
+    FileSystem fs = checkPath.getFileSystem(srcHiveWarehouse.hiveConf);
+    assertFalse(fs.exists(checkPath));
+    checkPath = new Path(exportMetaPath);
+    checkPath = new Path(checkPath, EximUtil.METADATA_NAME);
+    assertTrue(fs.exists(checkPath));
+  }
 }


http://git-wip-us.apache.org/repos/asf/hive/blob/6a16a71c/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
index 26e308c..0f67174 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/parse/TestReplicationScenariosAcrossInstances.java
@@ -797,7 +797,7 @@ public class TestReplicationScenariosAcrossInstances {
   }
 
   @Test
-  public void shouldNotCreateDirectoryForNonNativeTableInDumpDirectory() throws Throwable {
+  public void testShouldNotCreateDirectoryForNonNativeTableInDumpDirectory() throws Throwable {
     String createTableQuery =
         "CREATE TABLE custom_serdes( serde_id bigint COMMENT 'from deserializer', name string "
             + "COMMENT 'from deserializer', slib string COMMENT 'from deserializer') "
@@ -835,6 +835,33 @@ public class TestReplicationScenariosAcrossInstances {
     }
   }
 
+  @Test
+  public void testShouldDumpMetaDataForNonNativeTableIfSetMeataDataOnly() throws Throwable {
+    String tableName = testName.getMethodName() + "_table";
+    String createTableQuery =
+        "CREATE TABLE " + tableName + " ( serde_id bigint COMMENT 'from deserializer', name string "
+            + "COMMENT 'from deserializer', slib string COMMENT 'from deserializer') "
+            + "ROW FORMAT SERDE 'org.apache.hive.storage.jdbc.JdbcSerDe' "
+            + "STORED BY 'org.apache.hive.storage.jdbc.JdbcStorageHandler' "
+            + "WITH SERDEPROPERTIES ('serialization.format'='1') "
+            + "TBLPROPERTIES ( "
+            + "'hive.sql.database.type'='METASTORE', "
+            + "'hive.sql.query'='SELECT \"SERDE_ID\", \"NAME\", \"SLIB\" FROM \"SERDES\"')";
+
+    WarehouseInstance.Tuple bootstrapTuple = primary
+        .run("use " + primaryDbName)
+        .run(createTableQuery)
+        .dump(primaryDbName, null, Collections.singletonList("'hive.repl.dump.metadata.only'='true'"));
+
+    // Bootstrap load in replica
+    replica.load(replicatedDbName, bootstrapTuple.dumpLocation)
+        .status(replicatedDbName)
+        .verifyResult(bootstrapTuple.lastReplicationId)
+        .run("use " + replicatedDbName)
+        .run("show tables")
+        .verifyResult(tableName);
+  }
+
   private void verifyIfCkptSet(Map<String, String> props, String dumpDir) {
     assertTrue(props.containsKey(ReplUtils.REPL_CHECKPOINT_KEY));
     assertTrue(props.get(ReplUtils.REPL_CHECKPOINT_KEY).equals(dumpDir));


http://git-wip-us.apache.org/repos/asf/hive/blob/6a16a71c/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/Utils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/Utils.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/Utils.java
index 14572ad..e356607 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/Utils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/dump/Utils.java
@@ -172,7 +172,8 @@ public class Utils {
       return false;
     }
 
-    if (tableHandle.isNonNative()) {
+    // if its metadata only, then dump metadata of non native tables also.
+    if (tableHandle.isNonNative() && !replicationSpec.isMetadataOnly()) {
       return false;
     }
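
For context, a minimal HiveQL sketch of the statements the new tests exercise; the table, database, and path names below are illustrative placeholders, not taken from the commit. With this change, a metadata-only export and a metadata-only replication dump include a non-native (storage-handler backed) table, while a plain data export of such a table continues to fail.

    -- Hypothetical example, assuming a table named jdbc_backed_table created with JdbcStorageHandler.
    -- Metadata-only export of a non-native table now succeeds:
    EXPORT TABLE jdbc_backed_table TO '/tmp/export_meta' FOR METADATA REPLICATION('1');
    -- A data export of the same table is still rejected:
    EXPORT TABLE jdbc_backed_table TO '/tmp/export_data';
    -- A bootstrap dump with the metadata-only flag now also dumps non-native tables:
    REPL DUMP src_db WITH ('hive.repl.dump.metadata.only'='true');

This mirrors the Utils.java change above: a non-native table is skipped during the dump only when the replication spec is not metadata-only.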