Repository: hive
Updated Branches:
  refs/heads/master 2a8d1bf65 -> 74c7c2cd9


HIVE-15556 : Replicate views (Sankar Hariappan via Sushanth Sowmyan)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/74c7c2cd
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/74c7c2cd
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/74c7c2cd

Branch: refs/heads/master
Commit: 74c7c2cd91101ee3b195da8c78deadc7bccd2257
Parents: 2a8d1bf
Author: Sushanth Sowmyan <[email protected]>
Authored: Wed Mar 15 20:55:20 2017 -0700
Committer: Sushanth Sowmyan <[email protected]>
Committed: Wed Mar 15 20:56:23 2017 -0700

----------------------------------------------------------------------
 .../hive/ql/TestReplicationScenarios.java       |  83 ++++-
 .../apache/hadoop/hive/ql/metadata/Table.java   |   8 +
 .../apache/hadoop/hive/ql/parse/EximUtil.java   |   3 -
 .../hive/ql/parse/ExportSemanticAnalyzer.java   |   8 +-
 .../hive/ql/parse/ImportSemanticAnalyzer.java   |  81 ++---
 .../ql/parse/ReplicationSemanticAnalyzer.java   |   3 +
 .../hadoop/hive/ql/plan/CreateViewDesc.java     |  22 ++
 .../hadoop/hive/ql/plan/ImportTableDesc.java    | 321 +++++++++++++++++++
 8 files changed, 467 insertions(+), 62 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/74c7c2cd/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
index c26a075..c9092b1 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
@@ -93,7 +93,7 @@ public class TestReplicationScenarios {
       return;
     }
 
-    hconf.setVar(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS,
+    hconf.setVar(HiveConf.ConfVars.METASTORE_TRANSACTIONAL_EVENT_LISTENERS,
        DBNOTIF_LISTENER_CLASSNAME); // turn on db notification listener on metastore
     hconf.setBoolVar(HiveConf.ConfVars.REPLCMENABLED, true);
     hconf.setBoolVar(HiveConf.ConfVars.FIRE_EVENTS_FOR_DML, true);
@@ -932,6 +932,87 @@ public class TestReplicationScenarios {
   }
 
   @Test
+  public void testViewsReplication() throws IOException {
+    String testName = "viewsReplication";
+    LOG.info("Testing "+testName);
+    String dbName = testName + "_" + tid;
+
+    run("CREATE DATABASE " + dbName);
+
+    run("CREATE TABLE " + dbName + ".unptned(a string) STORED AS TEXTFILE");
+    run("CREATE TABLE " + dbName + ".ptned(a string) partitioned by (b int) 
STORED AS TEXTFILE");
+    run("CREATE VIEW " + dbName + ".virtual_view AS SELECT * FROM " + dbName + 
".unptned");
+
+    String[] unptn_data = new String[]{ "eleven" , "twelve" };
+    String[] ptn_data_1 = new String[]{ "thirteen", "fourteen", "fifteen"};
+    String[] ptn_data_2 = new String[]{ "fifteen", "sixteen", "seventeen"};
+    String[] empty = new String[]{};
+
+    String unptn_locn = new Path(TEST_PATH , testName + "_unptn").toUri().getPath();
+    String ptn_locn_1 = new Path(TEST_PATH , testName + "_ptn1").toUri().getPath();
+    String ptn_locn_2 = new Path(TEST_PATH , testName + "_ptn2").toUri().getPath();
+
+    createTestDataFile(unptn_locn, unptn_data);
+    createTestDataFile(ptn_locn_1, ptn_data_1);
+    createTestDataFile(ptn_locn_2, ptn_data_2);
+
+    verifySetup("SELECT a from " + dbName + ".ptned", empty);
+    verifySetup("SELECT * from " + dbName + ".unptned", empty);
+    verifySetup("SELECT * from " + dbName + ".virtual_view", empty);
+
+    run("LOAD DATA LOCAL INPATH '" + unptn_locn + "' OVERWRITE INTO TABLE " + 
dbName + ".unptned");
+    verifySetup("SELECT * from " + dbName + ".unptned", unptn_data);
+    verifySetup("SELECT * from " + dbName + ".virtual_view", unptn_data);
+
+    run("LOAD DATA LOCAL INPATH '" + ptn_locn_1 + "' OVERWRITE INTO TABLE " + 
dbName + ".ptned PARTITION(b=1)");
+    verifySetup("SELECT a from " + dbName + ".ptned WHERE b=1", ptn_data_1);
+    run("LOAD DATA LOCAL INPATH '" + ptn_locn_2 + "' OVERWRITE INTO TABLE " + 
dbName + ".ptned PARTITION(b=2)");
+    verifySetup("SELECT a from " + dbName + ".ptned WHERE b=2", ptn_data_2);
+
+    run("CREATE MATERIALIZED VIEW " + dbName + ".mat_view AS SELECT a FROM " + 
dbName + ".ptned where b=1");
+    verifySetup("SELECT a from " + dbName + ".mat_view", ptn_data_1);
+
+    advanceDumpDir();
+    run("REPL DUMP " + dbName);
+    String replDumpLocn = getResult(0,0);
+    String replDumpId = getResult(0,1,true);
+    LOG.info("Bootstrap-dump: Dumped to {} with id 
{}",replDumpLocn,replDumpId);
+    run("REPL LOAD " + dbName + "_dupe FROM '" + replDumpLocn + "'");
+
+    verifyRun("SELECT * from " + dbName + "_dupe.virtual_view", unptn_data);
+    verifyRun("SELECT a from " + dbName + "_dupe.mat_view", ptn_data_1);
+
+    run("CREATE VIEW " + dbName + ".virtual_view2 AS SELECT a FROM " + dbName 
+ ".ptned where b=2");
+    verifySetup("SELECT a from " + dbName + ".virtual_view2", ptn_data_2);
+
+    // Create a view with a name that already exists, to verify that the failure flow clears the added create_table event.
+    run("CREATE VIEW " + dbName + ".virtual_view2 AS SELECT a FROM " + dbName + ".ptned where b=2");
+
+    run("CREATE MATERIALIZED VIEW " + dbName + ".mat_view2 AS SELECT * FROM " 
+ dbName + ".unptned");
+    verifySetup("SELECT * from " + dbName + ".mat_view2", unptn_data);
+
+    // Perform REPL-DUMP/LOAD
+    advanceDumpDir();
+    run("REPL DUMP " + dbName + " FROM " + replDumpId );
+    String incrementalDumpLocn = getResult(0,0);
+    String incrementalDumpId = getResult(0,1,true);
+    LOG.info("Incremental-dump: Dumped to {} with id {}", incrementalDumpLocn, 
incrementalDumpId);
+    run("EXPLAIN REPL LOAD " + dbName + "_dupe FROM '" + incrementalDumpLocn + 
"'");
+    printOutput();
+    run("REPL LOAD " + dbName + "_dupe FROM '"+incrementalDumpLocn+"'");
+
+    run("REPL STATUS " + dbName + "_dupe");
+    verifyResults(new String[] {incrementalDumpId});
+
+    verifyRun("SELECT * from " + dbName + "_dupe.unptned", unptn_data);
+    verifyRun("SELECT a from " + dbName + "_dupe.ptned where b=1", ptn_data_1);
+    verifyRun("SELECT * from " + dbName + "_dupe.virtual_view", unptn_data);
+    verifyRun("SELECT a from " + dbName + "_dupe.mat_view", ptn_data_1);
+    verifyRun("SELECT * from " + dbName + "_dupe.virtual_view2", ptn_data_2);
+    verifyRun("SELECT * from " + dbName + "_dupe.mat_view2", unptn_data);
+  }
+
+  @Test
   public void testStatus() throws IOException {
     // first test ReplStateMap functionality
     Map<String,Long> cmap = new ReplStateMap<String,Long>();

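The dump/load cycle exercised above follows the usual replication pattern: a bootstrap REPL DUMP returns a dump location plus a last-replicated event id, and each later incremental dump is taken FROM that id. A minimal sketch of the pattern, assuming the same run()/getResult() test helpers used in the test above:

    // Bootstrap: dump the whole database, then load it into the replica.
    run("REPL DUMP " + dbName);
    String bootstrapLocn = getResult(0, 0);
    String bootstrapId = getResult(0, 1, true);
    run("REPL LOAD " + dbName + "_dupe FROM '" + bootstrapLocn + "'");

    // Incremental: dump only the events that happened after the bootstrap id.
    run("REPL DUMP " + dbName + " FROM " + bootstrapId);
    run("REPL LOAD " + dbName + "_dupe FROM '" + getResult(0, 0) + "'");
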
http://git-wip-us.apache.org/repos/asf/hive/blob/74c7c2cd/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
index 3e771ad..171f944 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Table.java
@@ -504,6 +504,14 @@ public class Table implements Serializable {
     return null;
   }
 
+  public List<String> getPartColNames() {
+    List<String> partColNames = new ArrayList<String>();
+    for (FieldSchema key : getPartCols()) {
+      partColNames.add(key.getName());
+    }
+    return partColNames;
+  }
+
   public boolean isPartitionKey(String colName) {
     return getPartColByName(colName) == null ? false : true;
   }

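The new getPartColNames() helper simply projects the column names out of the partition FieldSchema list; ImportTableDesc (below) uses it to seed the partition columns of a CreateViewDesc. A small usage sketch, with a hypothetical partitioned table:

    // For a table created with PARTITIONED BY (b int), this returns ["b"].
    List<String> partColNames = table.getPartColNames();
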
http://git-wip-us.apache.org/repos/asf/hive/blob/74c7c2cd/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
index 796ccc8..10cc286 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/EximUtil.java
@@ -201,9 +201,6 @@ public class EximUtil {
   }
 
   static void validateTable(org.apache.hadoop.hive.ql.metadata.Table table) throws SemanticException {
-    if (table.isView() || table.isMaterializedView()) {
-      throw new SemanticException(ErrorMsg.DML_AGAINST_VIEW.getMsg());
-    }
     if (table.isNonNative()) {
       throw new SemanticException(ErrorMsg.EXIM_FOR_NON_NATIVE.getMsg());
     }

http://git-wip-us.apache.org/repos/asf/hive/blob/74c7c2cd/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
index 08bad63..e101d72 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ExportSemanticAnalyzer.java
@@ -112,10 +112,12 @@ public class ExportSemanticAnalyzer extends BaseSemanticAnalyzer {
             && ts.tableHandle.isTemporary()){
           // No replication for temporary tables either
           ts = null;
+        } else if (ts.tableHandle.isView()) {
+          replicationSpec.setIsMetadataOnly(true);
         }
 
       } catch (SemanticException e) {
-        // table was a view, a non-native table or an offline table.
+        // table was a non-native table or an offline table.
         // ignore for replication, error if not.
         if (replicationSpec.isInReplicationScope()){
           ts = null; // null out ts so we can't use it.
@@ -204,11 +206,11 @@ public class ExportSemanticAnalyzer extends BaseSemanticAnalyzer {
         Path fromPath = ts.tableHandle.getDataLocation();
         Path toDataPath = new Path(parentPath, EximUtil.DATA_PATH_NAME);
         Task<? extends Serializable> rTask =
-            ReplCopyTask.getDumpCopyTask(replicationSpec, fromPath, toDataPath, conf);
+                ReplCopyTask.getDumpCopyTask(replicationSpec, fromPath, toDataPath, conf);
         rootTasks.add(rTask);
         inputs.add(new ReadEntity(ts.tableHandle));
       }
-      outputs.add(toWriteEntity(parentPath,conf));
+      outputs.add(toWriteEntity(parentPath, conf));
     }
   }
 

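Since a view carries no data directory, marking the ReplicationSpec as metadata-only is what keeps the export from scheduling a data copy: the dump then contains only the _metadata file for that object. A sketch of the consumer-side check, assuming the copy is guarded by the spec's isMetadataOnly() accessor:

    // Only schedule a data copy when the spec is not metadata-only; views
    // take the metadata-only branch set up in the analyzer above.
    if (!replicationSpec.isMetadataOnly()) {
      rootTasks.add(ReplCopyTask.getDumpCopyTask(replicationSpec, fromPath, toDataPath, conf));
    }
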
http://git-wip-us.apache.org/repos/asf/hive/blob/74c7c2cd/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
index c398792..245c483 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ImportSemanticAnalyzer.java
@@ -61,7 +61,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.InvalidTableException;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.plan.AddPartitionDesc;
-import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
+import org.apache.hadoop.hive.ql.plan.ImportTableDesc;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
 import org.apache.hadoop.hive.ql.plan.DropTableDesc;
 import org.apache.hadoop.hive.ql.plan.LoadTableDesc;
@@ -212,7 +212,12 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
 
     // Create table associated with the import
     // Executed if relevant, and used to contain all the other details about the table if not.
-    CreateTableDesc tblDesc = getBaseCreateTableDescFromTable(dbname,rv.getTable());
+    ImportTableDesc tblDesc;
+    try {
+      tblDesc = getBaseCreateTableDescFromTable(dbname, rv.getTable());
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
 
     if ((replicationSpec!= null) && replicationSpec.isInReplicationScope()){
       tblDesc.setReplicationSpec(replicationSpec);
@@ -303,7 +308,8 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
   }
 
   private static AddPartitionDesc getBaseAddPartitionDescFromPartition(
-      Path fromPath, String dbname, CreateTableDesc tblDesc, Partition partition) throws MetaException {
+      Path fromPath, String dbname,
+      ImportTableDesc tblDesc, Partition partition) throws MetaException, SemanticException {
     AddPartitionDesc partsDesc = new AddPartitionDesc(dbname, tblDesc.getTableName(),
         EximUtil.makePartSpec(tblDesc.getPartCols(), partition.getValues()),
         partition.getSd().getLocation(), partition.getParameters());
@@ -321,37 +327,10 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
     return partsDesc;
   }
 
-  private static CreateTableDesc getBaseCreateTableDescFromTable(String dbName,
-      org.apache.hadoop.hive.metastore.api.Table table) {
-    if ((table.getPartitionKeys() == null) || (table.getPartitionKeys().size() == 0)){
-      table.putToParameters(StatsSetupConst.DO_NOT_UPDATE_STATS, StatsSetupConst.TRUE);
-    }
-    CreateTableDesc tblDesc = new CreateTableDesc(
-        dbName,
-        table.getTableName(),
-        false, // isExternal: set to false here, can be overwritten by the
-               // IMPORT stmt
-        table.isTemporary(),
-        table.getSd().getCols(),
-        table.getPartitionKeys(),
-        table.getSd().getBucketCols(),
-        table.getSd().getSortCols(),
-        table.getSd().getNumBuckets(),
-        null, null, null, null, null, // these 5 delims passed as serde params
-        null, // comment passed as table params
-        table.getSd().getInputFormat(),
-        table.getSd().getOutputFormat(),
-        null, // location: set to null here, can be
-              // overwritten by the IMPORT stmt
-        table.getSd().getSerdeInfo().getSerializationLib(),
-        null, // storagehandler passed as table params
-        table.getSd().getSerdeInfo().getParameters(),
-        table.getParameters(), false,
-        (null == table.getSd().getSkewedInfo()) ? null : table.getSd().getSkewedInfo()
-            .getSkewedColNames(),
-        (null == table.getSd().getSkewedInfo()) ? null : table.getSd().getSkewedInfo()
-            .getSkewedColValues(), null, null);
-    tblDesc.setStoredAsSubDirectories(table.getSd().isStoredAsSubDirectories());
+  private static ImportTableDesc getBaseCreateTableDescFromTable(String dbName,
+      org.apache.hadoop.hive.metastore.api.Table tblObj) throws Exception {
+    Table table = new Table(tblObj);
+    ImportTableDesc tblDesc = new ImportTableDesc(dbName, table);
     return tblDesc;
   }
 
@@ -370,12 +349,8 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
     return loadTableTask;
   }
 
-  private static Task<?> createTableTask(CreateTableDesc tableDesc, EximUtil.SemanticAnalyzerWrapperContext x){
-    return TaskFactory.get(new DDLWork(
-        x.getInputs(),
-        x.getOutputs(),
-        tableDesc
-    ), x.getConf());
+  private static Task<?> createTableTask(ImportTableDesc tableDesc, EximUtil.SemanticAnalyzerWrapperContext x){
+    return tableDesc.getCreateTableTask(x);
   }
 
   private static Task<?> dropTableTask(Table table, EximUtil.SemanticAnalyzerWrapperContext x){
@@ -386,21 +361,17 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
     ), x.getConf());
   }
 
-  private static Task<? extends Serializable> alterTableTask(CreateTableDesc tableDesc,
+  private static Task<? extends Serializable> alterTableTask(ImportTableDesc tableDesc,
       EximUtil.SemanticAnalyzerWrapperContext x, ReplicationSpec replicationSpec) {
     tableDesc.setReplaceMode(true);
     if ((replicationSpec != null) && (replicationSpec.isInReplicationScope())){
       tableDesc.setReplicationSpec(replicationSpec);
     }
-    return TaskFactory.get(new DDLWork(
-        x.getInputs(),
-        x.getOutputs(),
-        tableDesc
-    ), x.getConf());
+    return tableDesc.getCreateTableTask(x);
   }
 
   private static Task<? extends Serializable> alterSinglePartition(
-      URI fromURI, FileSystem fs, CreateTableDesc tblDesc,
+      URI fromURI, FileSystem fs, ImportTableDesc tblDesc,
       Table table, Warehouse wh, AddPartitionDesc addPartitionDesc,
       ReplicationSpec replicationSpec, org.apache.hadoop.hive.ql.metadata.Partition ptn,
       EximUtil.SemanticAnalyzerWrapperContext x) {
@@ -416,7 +387,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
     ), x.getConf());
   }
 
- private static Task<?> addSinglePartition(URI fromURI, FileSystem fs, CreateTableDesc tblDesc,
+ private static Task<?> addSinglePartition(URI fromURI, FileSystem fs, ImportTableDesc tblDesc,
       Table table, Warehouse wh,
       AddPartitionDesc addPartitionDesc, ReplicationSpec replicationSpec, EximUtil.SemanticAnalyzerWrapperContext x)
       throws MetaException, IOException, HiveException {
@@ -458,7 +429,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
    * Helper method to set location properly in partSpec
    */
   private static void fixLocationInPartSpec(
-      FileSystem fs, CreateTableDesc tblDesc, Table table,
+      FileSystem fs, ImportTableDesc tblDesc, Table table,
       Warehouse wh, ReplicationSpec replicationSpec,
       AddPartitionDesc.OnePartitionDesc partSpec,
       EximUtil.SemanticAnalyzerWrapperContext x) throws MetaException, HiveException, IOException {
@@ -515,7 +486,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
     return sb.toString();
   }
 
-  private static void checkTable(Table table, CreateTableDesc tableDesc, ReplicationSpec replicationSpec, HiveConf conf)
+  private static void checkTable(Table table, ImportTableDesc tableDesc, ReplicationSpec replicationSpec, HiveConf conf)
       throws SemanticException, URISyntaxException {
     // This method gets called only in the scope that a destination table already exists, so
     // we're validating if the table is an appropriate destination to import into
@@ -737,7 +708,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
    * @param wh
    */
   private static void createRegularImportTasks(
-      CreateTableDesc tblDesc,
+      ImportTableDesc tblDesc,
       List<AddPartitionDesc> partitionDescs,
       boolean isPartSpecSet,
       ReplicationSpec replicationSpec,
@@ -773,7 +744,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
     } else {
       x.getLOG().debug("table " + tblDesc.getTableName() + " does not exist");
 
-      Task<?> t = TaskFactory.get(new DDLWork(x.getInputs(), x.getOutputs(), tblDesc), x.getConf());
+      Task<?> t = createTableTask(tblDesc, x);
       table = new Table(tblDesc.getDatabaseName(), tblDesc.getTableName());
       Database parentDb = x.getHive().getDatabase(tblDesc.getDatabaseName());
 
@@ -812,7 +783,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
    * Create tasks for repl import
    */
   private static void createReplImportTasks(
-      CreateTableDesc tblDesc,
+      ImportTableDesc tblDesc,
       List<AddPartitionDesc> partitionDescs,
      boolean isPartSpecSet, ReplicationSpec replicationSpec, boolean waitOnPrecursor,
       Table table, URI fromURI, FileSystem fs, Warehouse wh,
@@ -963,7 +934,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
 
   }
 
-  private static boolean isPartitioned(CreateTableDesc tblDesc) {
+  private static boolean isPartitioned(ImportTableDesc tblDesc) {
     return !(tblDesc.getPartCols() == null || tblDesc.getPartCols().isEmpty());
   }
 
@@ -971,7 +942,7 @@ public class ImportSemanticAnalyzer extends BaseSemanticAnalyzer {
    * Utility method that returns a table if one corresponding to the destination
    * tblDesc is found. Returns null if no such table is found.
    */
-   private static Table tableIfExists(CreateTableDesc tblDesc, Hive db) throws HiveException {
+   private static Table tableIfExists(ImportTableDesc tblDesc, Hive db) throws HiveException {
     try {
       return db.getTable(tblDesc.getDatabaseName(),tblDesc.getTableName());
     } catch (InvalidTableException e) {

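The net effect of swapping CreateTableDesc for ImportTableDesc throughout this analyzer is that the table-vs-view decision is made in one place. A before/after sketch of the create-task call site, using the names from the diff above:

    // Before: the import path always built a table-create DDLWork directly.
    //   TaskFactory.get(new DDLWork(x.getInputs(), x.getOutputs(), tblDesc), x.getConf());
    // After: the wrapper dispatches to CreateTableDesc or CreateViewDesc internally.
    Task<?> t = createTableTask(tblDesc, x);
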
http://git-wip-us.apache.org/repos/asf/hive/blob/74c7c2cd/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
index e72d621..011df19 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
@@ -431,6 +431,9 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
         }
 
         Table qlMdTable = new Table(tobj);
+        if (qlMdTable.isView()) {
+          replicationSpec.setIsMetadataOnly(true);
+        }
 
         Path metaDataPath = new Path(evRoot, EximUtil.METADATA_NAME);
         EximUtil.createExportDump(

http://git-wip-us.apache.org/repos/asf/hive/blob/74c7c2cd/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
index a69f8e6..6cc4a40 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateViewDesc.java
@@ -24,6 +24,7 @@ import java.util.Map;
 
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 
@@ -54,6 +55,7 @@ public class CreateViewDesc extends DDLDesc implements Serializable {
   private String serde; // only used for materialized views
   private String storageHandler; // only used for materialized views
   private Map<String, String> serdeProps; // only used for materialized views
+  private ReplicationSpec replicationSpec = null;
 
   /**
    * For serialization only.
@@ -269,6 +271,9 @@ public class CreateViewDesc extends DDLDesc implements Serializable {
     return isMaterialized;
   }
 
+  public void setLocation(String location) {
+    this.location = location;
+  }
   public String getLocation() {
     return location;
   }
@@ -285,4 +290,21 @@ public class CreateViewDesc extends DDLDesc implements Serializable {
     return serdeProps;
   }
 
+  /**
+   * @param replicationSpec Sets the replication spec governing this create.
+   * This parameter will have meaningful values only for creates happening as a result of a replication.
+   */
+  public void setReplicationSpec(ReplicationSpec replicationSpec) {
+    this.replicationSpec = replicationSpec;
+  }
+
+  /**
+   * @return what kind of replication spec this create is running under.
+   */
+  public ReplicationSpec getReplicationSpec(){
+    if (replicationSpec == null){
+      this.replicationSpec = new ReplicationSpec();
+    }
+    return this.replicationSpec;
+  }
 }

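getReplicationSpec() lazily falls back to a default ReplicationSpec, mirroring the equivalent accessor on CreateTableDesc, so callers never see null. A hypothetical consumer:

    // No null check needed: a default (non-replication) spec is returned when
    // none was set, and isInReplicationScope() is then simply false.
    if (createViewDesc.getReplicationSpec().isInReplicationScope()) {
      // apply replication-specific handling, e.g. allow replace semantics
    }
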
http://git-wip-us.apache.org/repos/asf/hive/blob/74c7c2cd/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
new file mode 100644
index 0000000..f43627d
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ImportTableDesc.java
@@ -0,0 +1,321 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.util.List;
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.Order;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.TaskFactory;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.Table;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.EximUtil;
+import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.plan.CreateTableDesc;
+import org.apache.hadoop.hive.ql.plan.CreateViewDesc;
+
+/**
+ * ImportTableDesc: wraps a CreateTableDesc or a CreateViewDesc, depending on
+ * whether the imported object is a table or a (materialized) view.
+ */
+public class ImportTableDesc {
+  private String dbName = null;
+  private Table table = null;
+  private CreateTableDesc createTblDesc = null;
+  private CreateViewDesc createViewDesc = null;
+
+  public enum TYPE { TABLE, VIEW };
+
+  public ImportTableDesc(String dbName, Table table) throws Exception {
+    this.dbName = dbName;
+    this.table = table;
+
+    switch (getTableType()) {
+      case TABLE:
+        this.createTblDesc = new CreateTableDesc(dbName,
+                table.getTableName(),
+                false, // isExternal: set to false here, can be overwritten by the IMPORT stmt
+                table.isTemporary(),
+                table.getSd().getCols(),
+                table.getPartitionKeys(),
+                table.getSd().getBucketCols(),
+                table.getSd().getSortCols(),
+                table.getSd().getNumBuckets(),
+                null, null, null, null, null, // these 5 delims passed as serde params
+                null, // comment passed as table params
+                table.getSd().getInputFormat(),
+                table.getSd().getOutputFormat(),
+                null, // location: set to null here, can be overwritten by the IMPORT stmt
+                table.getSd().getSerdeInfo().getSerializationLib(),
+                null, // storagehandler passed as table params
+                table.getSd().getSerdeInfo().getParameters(),
+                table.getParameters(), false,
+                (null == table.getSd().getSkewedInfo()) ? null : table.getSd().getSkewedInfo()
+                        .getSkewedColNames(),
+                (null == table.getSd().getSkewedInfo()) ? null : table.getSd().getSkewedInfo()
+                        .getSkewedColValues(), null, null);
+        this.createTblDesc.setStoredAsSubDirectories(table.getSd().isStoredAsSubDirectories());
+        break;
+      case VIEW:
+        String[] qualViewName = { dbName, table.getTableName() };
+        String dbDotView = BaseSemanticAnalyzer.getDotName(qualViewName);
+        if (table.isMaterializedView()) {
+          this.createViewDesc = new CreateViewDesc(dbDotView,
+                  table.getAllCols(),
+                  null, // comment passed as table params
+                  table.getParameters(),
+                  table.getPartColNames(),
+                  false,false,false,false,
+                  table.getSd().getInputFormat(),
+                  table.getSd().getOutputFormat(),
+                  null, // location: set to null here, can be overwritten by the IMPORT stmt
+                  table.getSd().getSerdeInfo().getSerializationLib(),
+                  null, // storagehandler passed as table params
+                  table.getSd().getSerdeInfo().getParameters());
+        } else {
+          this.createViewDesc = new CreateViewDesc(dbDotView,
+                  table.getAllCols(),
+                  null, // comment passed as table params
+                  table.getParameters(),
+                  table.getPartColNames(),
+                  false,false,false,
+                  table.getSd().getInputFormat(),
+                  table.getSd().getOutputFormat(),
+                  table.getSd().getSerdeInfo().getSerializationLib());
+        }
+
+        this.setViewAsReferenceText(dbName, table);
+        this.createViewDesc.setPartCols(table.getPartCols());
+        break;
+      default:
+        throw new HiveException("Invalid table type");
+    }
+  }
+
+  public TYPE getTableType() {
+    if (table.isView() || table.isMaterializedView()) {
+      return TYPE.VIEW;
+    }
+    return TYPE.TABLE;
+  }
+
+  public void setViewAsReferenceText(String dbName, Table table) {
+    String originalText = table.getViewOriginalText();
+    String expandedText = table.getViewExpandedText();
+
+    if (!dbName.equals(table.getDbName())) {
+      // TODO: If the DB name doesn't match the one in the dump metadata, rewrite the original
+      // and expanded texts using the new DB name. Currently they refer to the source database name.
+    }
+
+    this.createViewDesc.setViewOriginalText(originalText);
+    this.createViewDesc.setViewExpandedText(expandedText);
+  }
+
+  public void setReplicationSpec(ReplicationSpec replSpec) {
+    switch (getTableType()) {
+      case TABLE:
+        createTblDesc.setReplicationSpec(replSpec);
+        break;
+      case VIEW:
+        createViewDesc.setReplicationSpec(replSpec);
+        break;
+    }
+  }
+
+  public void setExternal(boolean isExternal) {
+    if (TYPE.TABLE.equals(getTableType())) {
+      createTblDesc.setExternal(isExternal);
+    }
+  }
+
+  public boolean isExternal() {
+    if (TYPE.TABLE.equals(getTableType())) {
+      return createTblDesc.isExternal();
+    }
+    return false;
+  }
+
+  public void setLocation(String location) {
+    switch (getTableType()) {
+      case TABLE:
+        createTblDesc.setLocation(location);
+        break;
+      case VIEW:
+        createViewDesc.setLocation(location);
+        break;
+    }
+  }
+
+  public String getLocation() {
+    switch (getTableType()) {
+      case TABLE:
+        return createTblDesc.getLocation();
+      case VIEW:
+        return createViewDesc.getLocation();
+    }
+    return null;
+  }
+
+  public void setTableName(String tableName) throws SemanticException {
+    switch (getTableType()) {
+      case TABLE:
+        createTblDesc.setTableName(tableName);
+        break;
+      case VIEW:
+        String[] qualViewName = { dbName, tableName };
+        String dbDotView = BaseSemanticAnalyzer.getDotName(qualViewName);
+        createViewDesc.setViewName(dbDotView);
+        break;
+    }
+  }
+
+  public String getTableName() throws SemanticException {
+    switch (getTableType()) {
+      case TABLE:
+        return createTblDesc.getTableName();
+      case VIEW:
+        String dbDotView = createViewDesc.getViewName();
+        String[] names = Utilities.getDbTableName(dbDotView);
+        return names[1]; // names[0] has the db name and names[1] has the view name
+    }
+    return null;
+  }
+
+  public List<FieldSchema> getPartCols() {
+    switch (getTableType()) {
+      case TABLE:
+        return createTblDesc.getPartCols();
+      case VIEW:
+        return createViewDesc.getPartCols();
+    }
+    return null;
+  }
+
+  public List<FieldSchema> getCols() {
+    switch (getTableType()) {
+      case TABLE:
+        return createTblDesc.getCols();
+      case VIEW:
+        return createViewDesc.getSchema();
+    }
+    return null;
+  }
+
+  public Map<String, String> getTblProps() {
+    switch (getTableType()) {
+      case TABLE:
+        return createTblDesc.getTblProps();
+      case VIEW:
+        return createViewDesc.getTblProps();
+    }
+    return null;
+  }
+
+  public String getInputFormat() {
+    switch (getTableType()) {
+      case TABLE:
+        return createTblDesc.getInputFormat();
+      case VIEW:
+        return createViewDesc.getInputFormat();
+    }
+    return null;
+  }
+
+  public String getOutputFormat() {
+    switch (getTableType()) {
+      case TABLE:
+        return createTblDesc.getOutputFormat();
+      case VIEW:
+        return createViewDesc.getOutputFormat();
+    }
+    return null;
+  }
+
+  public String getSerName() {
+    switch (getTableType()) {
+      case TABLE:
+        return createTblDesc.getSerName();
+      case VIEW:
+        return createViewDesc.getSerde();
+    }
+    return null;
+  }
+
+  public Map<String, String> getSerdeProps() {
+    switch (getTableType()) {
+      case TABLE:
+        return createTblDesc.getSerdeProps();
+      case VIEW:
+        return createViewDesc.getSerdeProps();
+    }
+    return null;
+  }
+
+  public List<String> getBucketCols() {
+    if (TYPE.TABLE.equals(getTableType())) {
+      return createTblDesc.getBucketCols();
+    }
+    return null;
+  }
+
+  public List<Order> getSortCols() {
+    if (TYPE.TABLE.equals(getTableType())) {
+      return createTblDesc.getSortCols();
+    }
+    return null;
+  }
+
+  /**
+   * @param replaceMode Determine if this CreateTable should behave like a replace-into alter instead
+   */
+  public void setReplaceMode(boolean replaceMode) {
+    if (TYPE.TABLE.equals(getTableType())) {
+      createTblDesc.setReplaceMode(replaceMode);
+    }
+  }
+
+  public String getDatabaseName() {
+    return dbName;
+  }
+
+  public Task<?> getCreateTableTask(EximUtil.SemanticAnalyzerWrapperContext x) {
+    switch (getTableType()) {
+      case TABLE:
+        return TaskFactory.get(new DDLWork(x.getInputs(), x.getOutputs(), createTblDesc), x.getConf());
+      case VIEW:
+        return TaskFactory.get(new DDLWork(x.getInputs(), x.getOutputs(), createViewDesc), x.getConf());
+    }
+    return null;
+  }
+
+  /**
+   * @return whether this table is actually a view
+   */
+  public boolean isView() { return table.isView(); }
+
+  public boolean isMaterializedView() {
+    return table.isMaterializedView();
+  }
+}

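Putting the new wrapper together: the import path now builds one ImportTableDesc from the dumped metastore Table object and lets it produce the right create task. A minimal sketch under the same names as the diff above (the tblObj input is whatever was read back from the dump's _metadata file):

    // Wrap the dumped object; the constructor inspects it and builds either
    // a CreateTableDesc (TABLE) or a CreateViewDesc (VIEW / materialized view).
    ImportTableDesc tblDesc = new ImportTableDesc(dbName, new Table(tblObj));
    tblDesc.setReplicationSpec(replicationSpec);

    // One call site, two possible DDL tasks.
    Task<?> createTask = tblDesc.getCreateTableTask(x);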