Repository: hive
Updated Branches:
  refs/heads/master aa29cd9d6 -> 4e162e01f


HIVE-16254 : metadata for values temporary tables for INSERTs is getting replicated during bootstrap (Anishek Agarwal, reviewed by Sushanth Sowmyan)
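
The gist of the patch, as a minimal standalone sketch: during a bootstrap dump, the list of tables to replicate is pruned of the temporary tables that INSERT INTO ... VALUES statements create, so their metadata never reaches the dump. The prefix constant below is a hypothetical stand-in; the real change filters on SemanticAnalyzer.VALUES_TMP_TABLE_NAME_PREFIX inside ReplicationSemanticAnalyzer, as the diff further down shows.

import java.util.ArrayList;
import java.util.List;

public class ValuesTmpTableFilterSketch {
  // Hypothetical stand-in for SemanticAnalyzer.VALUES_TMP_TABLE_NAME_PREFIX (lowercased).
  private static final String TMP_TABLE_PREFIX = "values__tmp__table";

  // Keep only tables whose names do not start with the temporary-table prefix.
  static List<String> removeValuesTemporaryTables(List<String> tableNames) {
    List<String> kept = new ArrayList<>();
    for (String name : tableNames) {
      if (!name.toLowerCase().startsWith(TMP_TABLE_PREFIX)) {
        kept.add(name);
      }
    }
    return kept;
  }

  public static void main(String[] args) {
    List<String> all = new ArrayList<>();
    all.add("values__tmp__table__1"); // created internally for an INSERT ... VALUES
    all.add("customers");
    all.add("orders");
    // Prints [customers, orders]: the temporary table is excluded from the dump list.
    System.out.println(removeValuesTemporaryTables(all));
  }
}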


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/4e162e01
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/4e162e01
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/4e162e01

Branch: refs/heads/master
Commit: 4e162e01f92bc5bf434bd2552bfafb24fa41f6b3
Parents: aa29cd9
Author: Sushanth Sowmyan <khorg...@gmail.com>
Authored: Tue Apr 4 09:38:50 2017 -0700
Committer: Sushanth Sowmyan <khorg...@gmail.com>
Committed: Tue Apr 4 09:39:28 2017 -0700

----------------------------------------------------------------------
 .../hive/ql/TestReplicationScenarios.java       | 43 +++++-----
 .../hive/metastore/messaging/EventUtils.java    | 87 ++------------------
 .../messaging/event/filters/AndFilter.java      | 22 +++++
 .../messaging/event/filters/BasicFilter.java    | 16 ++++
 .../event/filters/DatabaseAndTableFilter.java   | 35 ++++++++
 .../event/filters/EventBoundaryFilter.java      | 17 ++++
 .../event/filters/MessageFormatFilter.java      | 19 +++++
 ql/pom.xml                                      |  6 ++
 .../ql/parse/ReplicationSemanticAnalyzer.java   | 32 +++++--
 .../parse/TestReplicationSemanticAnalyzer.java  | 22 ++++-
 10 files changed, 187 insertions(+), 112 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/4e162e01/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
index 2688f35..4c9a1a2 100644
--- a/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
+++ b/itests/hive-unit/src/test/java/org/apache/hadoop/hive/ql/TestReplicationScenarios.java
@@ -28,19 +28,20 @@ import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;
 import org.apache.hadoop.hive.metastore.api.Partition;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.metastore.messaging.EventUtils;
+import org.apache.hadoop.hive.metastore.messaging.event.filters.AndFilter;
+import org.apache.hadoop.hive.metastore.messaging.event.filters.DatabaseAndTableFilter;
+import org.apache.hadoop.hive.metastore.messaging.event.filters.EventBoundaryFilter;
+import org.apache.hadoop.hive.metastore.messaging.event.filters.MessageFormatFilter;
 import org.apache.hadoop.hive.metastore.messaging.MessageFactory;
 import org.apache.hadoop.hive.ql.parse.ReplicationSemanticAnalyzer;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec.ReplStateMap;
 import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.util.Shell;
 import org.apache.thrift.TException;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -1177,8 +1178,8 @@ public class TestReplicationScenarios {
     // events to those that match the dbname and tblname provided to the filter.
     // If the tblname passed in to the filter is null, then it restricts itself
     // to dbname-matching alone.
-    IMetaStoreClient.NotificationFilter dbTblFilter = EventUtils.getDbTblNotificationFilter(dbname,tblname);
-    IMetaStoreClient.NotificationFilter dbFilter = EventUtils.getDbTblNotificationFilter(dbname,null);
+    IMetaStoreClient.NotificationFilter dbTblFilter = new DatabaseAndTableFilter(dbname,tblname);
+    IMetaStoreClient.NotificationFilter dbFilter = new DatabaseAndTableFilter(dbname,null);
 
     assertFalse(dbTblFilter.accept(null));
     assertTrue(dbTblFilter.accept(createDummyEvent(dbname, tblname, 0)));
@@ -1195,7 +1196,7 @@ public class TestReplicationScenarios {
     // within a range specified.
     long evBegin = 50;
     long evEnd = 75;
-    IMetaStoreClient.NotificationFilter evRangeFilter = EventUtils.getEventBoundaryFilter(evBegin,evEnd);
+    IMetaStoreClient.NotificationFilter evRangeFilter = new EventBoundaryFilter(evBegin,evEnd);
 
     assertTrue(evBegin < evEnd);
     assertFalse(evRangeFilter.accept(null));
@@ -1211,9 +1212,9 @@ public class TestReplicationScenarios {
     // that match a provided message format
 
     IMetaStoreClient.NotificationFilter restrictByDefaultMessageFormat =
-        EventUtils.restrictByMessageFormat(MessageFactory.getInstance().getMessageFormat());
+        new MessageFormatFilter(MessageFactory.getInstance().getMessageFormat());
     IMetaStoreClient.NotificationFilter restrictByArbitraryMessageFormat =
-        EventUtils.restrictByMessageFormat(MessageFactory.getInstance().getMessageFormat() + "_bogus");
+        new MessageFormatFilter(MessageFactory.getInstance().getMessageFormat() + "_bogus");
     NotificationEvent dummyEvent = createDummyEvent(dbname,tblname,0);
 
     assertEquals(MessageFactory.getInstance().getMessageFormat(),dummyEvent.getMessageFormat());
@@ -1238,19 +1239,19 @@ public class TestReplicationScenarios {
       }
     };
 
-    assertTrue(EventUtils.andFilter(yes, yes).accept(dummyEvent));
-    assertFalse(EventUtils.andFilter(yes, no).accept(dummyEvent));
-    assertFalse(EventUtils.andFilter(no, yes).accept(dummyEvent));
-    assertFalse(EventUtils.andFilter(no, no).accept(dummyEvent));
-
-    assertTrue(EventUtils.andFilter(yes, yes, yes).accept(dummyEvent));
-    assertFalse(EventUtils.andFilter(yes, yes, no).accept(dummyEvent));
-    assertFalse(EventUtils.andFilter(yes, no, yes).accept(dummyEvent));
-    assertFalse(EventUtils.andFilter(yes, no, no).accept(dummyEvent));
-    assertFalse(EventUtils.andFilter(no, yes, yes).accept(dummyEvent));
-    assertFalse(EventUtils.andFilter(no, yes, no).accept(dummyEvent));
-    assertFalse(EventUtils.andFilter(no, no, yes).accept(dummyEvent));
-    assertFalse(EventUtils.andFilter(no, no, no).accept(dummyEvent));
+    assertTrue(new AndFilter(yes, yes).accept(dummyEvent));
+    assertFalse(new AndFilter(yes, no).accept(dummyEvent));
+    assertFalse(new AndFilter(no, yes).accept(dummyEvent));
+    assertFalse(new AndFilter(no, no).accept(dummyEvent));
+
+    assertTrue(new AndFilter(yes, yes, yes).accept(dummyEvent));
+    assertFalse(new AndFilter(yes, yes, no).accept(dummyEvent));
+    assertFalse(new AndFilter(yes, no, yes).accept(dummyEvent));
+    assertFalse(new AndFilter(yes, no, no).accept(dummyEvent));
+    assertFalse(new AndFilter(no, yes, yes).accept(dummyEvent));
+    assertFalse(new AndFilter(no, yes, no).accept(dummyEvent));
+    assertFalse(new AndFilter(no, no, yes).accept(dummyEvent));
+    assertFalse(new AndFilter(no, no, no).accept(dummyEvent));
   }
 
   private NotificationEvent createDummyEvent(String dbname, String tblname, long evid) {

http://git-wip-us.apache.org/repos/asf/hive/blob/4e162e01/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/EventUtils.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/EventUtils.java b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/EventUtils.java
index a5414d1..8205c25 100644
--- a/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/EventUtils.java
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/EventUtils.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.metastore.messaging;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.messaging.event.filters.DatabaseAndTableFilter;
 import org.apache.thrift.TException;
 
 import java.io.IOException;
@@ -30,88 +31,10 @@ import java.util.List;
 
 public class EventUtils {
 
-  /**
-   * Utility function that constructs a notification filter to match a given db name and/or table name.
-   * If dbName == null, fetches all warehouse events.
-   * If dnName != null, but tableName == null, fetches all events for the db
-   * If dbName != null && tableName != null, fetches all events for the specified table
-   * @param dbName
-   * @param tableName
-   * @return
-   */
-  public static IMetaStoreClient.NotificationFilter getDbTblNotificationFilter(final String dbName, final String tableName){
-    return new IMetaStoreClient.NotificationFilter() {
-      @Override
-      public boolean accept(NotificationEvent event) {
-        if (event == null){
-          return false; // get rid of trivial case first, so that we can safely assume non-null
-        }
-        if (dbName == null){
-          return true; // if our dbName is null, we're interested in all wh events
-        }
-        if (dbName.equalsIgnoreCase(event.getDbName())){
-          if ( (tableName == null)
-              // if our dbName is equal, but tableName is blank, we're interested in this db-level event
-              || (tableName.equalsIgnoreCase(event.getTableName()))
-            // table level event that matches us
-              ){
-            return true;
-          }
-        }
-        return false;
-      }
-    };
-  }
-
-  public static IMetaStoreClient.NotificationFilter restrictByMessageFormat(final String messageFormat){
-    return new IMetaStoreClient.NotificationFilter() {
-      @Override
-      public boolean accept(NotificationEvent event) {
-        if (event == null){
-          return false; // get rid of trivial case first, so that we can safely assume non-null
-        }
-        if (messageFormat == null){
-          return true; // let's say that passing null in will not do any filtering.
-        }
-        if (messageFormat.equalsIgnoreCase(event.getMessageFormat())){
-          return true;
-        }
-        return false;
-      }
-    };
-  }
-
-  public static IMetaStoreClient.NotificationFilter getEventBoundaryFilter(final Long eventFrom, final Long eventTo){
-    return new IMetaStoreClient.NotificationFilter() {
-      @Override
-      public boolean accept(NotificationEvent event) {
-        if ( (event == null) || (event.getEventId() < eventFrom) || (event.getEventId() > eventTo)) {
-          return false;
-        }
-        return true;
-      }
-    };
-  }
-
-  public static IMetaStoreClient.NotificationFilter andFilter(
-      final IMetaStoreClient.NotificationFilter... filters ) {
-    return new IMetaStoreClient.NotificationFilter() {
-      @Override
-      public boolean accept(NotificationEvent event) {
-        for (IMetaStoreClient.NotificationFilter filter : filters){
-          if (!filter.accept(event)){
-            return false;
-          }
-        }
-        return true;
-      }
-    };
-  }
-
   public interface NotificationFetcher {
-    public int getBatchSize() throws IOException;
-    public long getCurrentNotificationEventId() throws IOException;
-    public List<NotificationEvent> getNextNotificationEvents(
+    int getBatchSize() throws IOException;
+    long getCurrentNotificationEventId() throws IOException;
+    List<NotificationEvent> getNextNotificationEvents(
         long pos, IMetaStoreClient.NotificationFilter filter) throws IOException;
   }
 
@@ -177,7 +100,7 @@ public class EventUtils {
     public NotificationEventIterator(
         NotificationFetcher nfetcher, long eventFrom, int maxEvents,
         String dbName, String tableName) throws IOException {
-      init(nfetcher, eventFrom, maxEvents, EventUtils.getDbTblNotificationFilter(dbName, tableName));
+      init(nfetcher, eventFrom, maxEvents, new DatabaseAndTableFilter(dbName, tableName));
       // using init(..) instead of this(..) because the EventUtils.getDbTblNotificationFilter
       // is an operation that needs to run before delegating to the other ctor, and this messes up chaining
       // ctors

http://git-wip-us.apache.org/repos/asf/hive/blob/4e162e01/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/AndFilter.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/AndFilter.java b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/AndFilter.java
new file mode 100644
index 0000000..f9fa93a
--- /dev/null
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/AndFilter.java
@@ -0,0 +1,22 @@
+package org.apache.hadoop.hive.metastore.messaging.event.filters;
+
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+
+public class AndFilter implements IMetaStoreClient.NotificationFilter {
+  final IMetaStoreClient.NotificationFilter[] filters;
+
+  public AndFilter(final IMetaStoreClient.NotificationFilter... filters) {
+    this.filters = filters;
+  }
+
+  @Override
+  public boolean accept(final NotificationEvent event) {
+    for (IMetaStoreClient.NotificationFilter filter : filters) {
+      if (!filter.accept(event)) {
+        return false;
+      }
+    }
+    return true;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/4e162e01/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/BasicFilter.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/BasicFilter.java b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/BasicFilter.java
new file mode 100644
index 0000000..5d6fa5c
--- /dev/null
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/BasicFilter.java
@@ -0,0 +1,16 @@
+package org.apache.hadoop.hive.metastore.messaging.event.filters;
+
+import org.apache.hadoop.hive.metastore.IMetaStoreClient.NotificationFilter;
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+
+public abstract class BasicFilter implements NotificationFilter {
+  @Override
+  public boolean accept(final NotificationEvent event) {
+    if (event == null) {
+      return false; // get rid of trivial case first, so that we can safely assume non-null
+    }
+    return shouldAccept(event);
+  }
+
+  abstract boolean shouldAccept(final NotificationEvent event);
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/4e162e01/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/DatabaseAndTableFilter.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/DatabaseAndTableFilter.java b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/DatabaseAndTableFilter.java
new file mode 100644
index 0000000..2ddf354
--- /dev/null
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/DatabaseAndTableFilter.java
@@ -0,0 +1,35 @@
+package org.apache.hadoop.hive.metastore.messaging.event.filters;
+
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+
+/**
+ * Utility function that constructs a notification filter to match a given db name and/or table name.
+ * If dbName == null, fetches all warehouse events.
+ * If dbName != null, but tableName == null, fetches all events for the db
+ * If dbName != null && tableName != null, fetches all events for the specified table
+ */
+public class DatabaseAndTableFilter extends BasicFilter {
+  private final String databaseName, tableName;
+
+  public DatabaseAndTableFilter(final String databaseName, final String tableName) {
+    this.databaseName = databaseName;
+    this.tableName = tableName;
+  }
+
+  @Override
+  boolean shouldAccept(final NotificationEvent event) {
+    if (databaseName == null) {
+      return true; // if our dbName is null, we're interested in all wh events
+    }
+    if (databaseName.equalsIgnoreCase(event.getDbName())) {
+      if ((tableName == null)
+          // if our dbName is equal, but tableName is blank, we're interested in this db-level event
+          || (tableName.equalsIgnoreCase(event.getTableName()))
+        // table level event that matches us
+          ) {
+        return true;
+      }
+    }
+    return false;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/4e162e01/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/EventBoundaryFilter.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/EventBoundaryFilter.java b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/EventBoundaryFilter.java
new file mode 100644
index 0000000..629b97b
--- /dev/null
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/EventBoundaryFilter.java
@@ -0,0 +1,17 @@
+package org.apache.hadoop.hive.metastore.messaging.event.filters;
+
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+
+public class EventBoundaryFilter extends BasicFilter {
+  private final long eventFrom, eventTo;
+
+  public EventBoundaryFilter(final long eventFrom, final long eventTo) {
+    this.eventFrom = eventFrom;
+    this.eventTo = eventTo;
+  }
+
+  @Override
+  boolean shouldAccept(final NotificationEvent event) {
+    return eventFrom <= event.getEventId() && event.getEventId() <= eventTo;
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/4e162e01/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/MessageFormatFilter.java
----------------------------------------------------------------------
diff --git a/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/MessageFormatFilter.java b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/MessageFormatFilter.java
new file mode 100644
index 0000000..3d4fbc4
--- /dev/null
+++ b/metastore/src/java/org/apache/hadoop/hive/metastore/messaging/event/filters/MessageFormatFilter.java
@@ -0,0 +1,19 @@
+package org.apache.hadoop.hive.metastore.messaging.event.filters;
+
+import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+
+public class MessageFormatFilter extends BasicFilter {
+  private final String format;
+
+  public MessageFormatFilter(String format) {
+    this.format = format;
+  }
+
+  @Override
+  boolean shouldAccept(final NotificationEvent event) {
+    if (format == null) {
+      return true; // let's say that passing null in will not do any filtering.
+    }
+    return format.equalsIgnoreCase(event.getMessageFormat());
+  }
+}
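
The five filter classes above replace the anonymous filters that EventUtils previously built inline; they compose through AndFilter exactly as ReplicationSemanticAnalyzer does later in this patch. A hedged usage sketch follows; the NotificationEvent constructor and setters are assumed from the Thrift-generated metastore API and may differ in detail.

import org.apache.hadoop.hive.metastore.IMetaStoreClient;
import org.apache.hadoop.hive.metastore.api.NotificationEvent;
import org.apache.hadoop.hive.metastore.messaging.event.filters.AndFilter;
import org.apache.hadoop.hive.metastore.messaging.event.filters.DatabaseAndTableFilter;
import org.apache.hadoop.hive.metastore.messaging.event.filters.EventBoundaryFilter;

public class FilterCompositionSketch {
  public static void main(String[] args) {
    // Accept only events on default.t1 whose event id falls in [50, 75].
    IMetaStoreClient.NotificationFilter filter = new AndFilter(
        new DatabaseAndTableFilter("default", "t1"),
        new EventBoundaryFilter(50, 75));

    // Assumed Thrift constructor: (eventId, eventTime, eventType, message).
    NotificationEvent ev = new NotificationEvent(60, 0, "INSERT", "{}");
    ev.setDbName("default");
    ev.setTableName("t1");
    System.out.println(filter.accept(ev)); // true: db/table match and id is in range

    ev.setTableName("t2");
    System.out.println(filter.accept(ev)); // false: table name no longer matches
  }
}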

http://git-wip-us.apache.org/repos/asf/hive/blob/4e162e01/ql/pom.xml
----------------------------------------------------------------------
diff --git a/ql/pom.xml b/ql/pom.xml
index 2d99c07..cba4ffa 100644
--- a/ql/pom.xml
+++ b/ql/pom.xml
@@ -712,6 +712,12 @@
       <version>${glassfish.jersey.version}</version>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.hamcrest</groupId>
+      <artifactId>hamcrest-all</artifactId>
+      <version>${hamcrest.version}</version>
+      <scope>test</scope>
+    </dependency>
   </dependencies>
 
   <profiles>

http://git-wip-us.apache.org/repos/asf/hive/blob/4e162e01/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
index 05d7be1..3ac7746 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ReplicationSemanticAnalyzer.java
@@ -21,6 +21,8 @@ import com.google.common.base.Function;
 import com.google.common.collect.Iterables;
 import com.google.common.primitives.Ints;
 import org.antlr.runtime.tree.Tree;
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.commons.collections.Predicate;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -30,6 +32,10 @@ import org.apache.hadoop.hive.metastore.ReplChangeManager;
 import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.NotificationEvent;
+import org.apache.hadoop.hive.metastore.messaging.event.filters.AndFilter;
+import org.apache.hadoop.hive.metastore.messaging.event.filters.DatabaseAndTableFilter;
+import org.apache.hadoop.hive.metastore.messaging.event.filters.EventBoundaryFilter;
+import org.apache.hadoop.hive.metastore.messaging.event.filters.MessageFormatFilter;
 import org.apache.hadoop.hive.metastore.messaging.AddPartitionMessage;
 import org.apache.hadoop.hive.metastore.messaging.AlterPartitionMessage;
 import org.apache.hadoop.hive.metastore.messaging.AlterTableMessage;
@@ -359,7 +365,7 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
         // during the bootstrap period and consolidate them with our dump.
 
         IMetaStoreClient.NotificationFilter evFilter =
-            EventUtils.getDbTblNotificationFilter(dbNameOrPattern, tblNameOrPattern);
+            new DatabaseAndTableFilter(dbNameOrPattern, tblNameOrPattern);
         EventUtils.MSClientNotificationFetcher evFetcher =
             new EventUtils.MSClientNotificationFetcher(db.getMSC());
         EventUtils.NotificationEventIterator evIter = new EventUtils.NotificationEventIterator(
@@ -402,10 +408,10 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
         // same factory, restricting by message format is effectively a guard against
         // older leftover data that would cause us problems.
 
-        IMetaStoreClient.NotificationFilter evFilter = EventUtils.andFilter(
-            EventUtils.getDbTblNotificationFilter(dbNameOrPattern, tblNameOrPattern),
-            EventUtils.getEventBoundaryFilter(eventFrom, eventTo),
-            EventUtils.restrictByMessageFormat(MessageFactory.getInstance().getMessageFormat()));
+        IMetaStoreClient.NotificationFilter evFilter = new AndFilter(
+            new DatabaseAndTableFilter(dbNameOrPattern, tblNameOrPattern),
+            new EventBoundaryFilter(eventFrom, eventTo),
+            new MessageFormatFilter(MessageFactory.getInstance().getMessageFormat()));
 
         EventUtils.MSClientNotificationFetcher evFetcher
             = new EventUtils.MSClientNotificationFetcher(db.getMSC());
@@ -1228,12 +1234,26 @@ public class ReplicationSemanticAnalyzer extends BaseSemanticAnalyzer {
   private Iterable<? extends String> matchesTbl(String dbName, String tblPattern)
       throws HiveException {
     if (tblPattern == null) {
-      return db.getAllTables(dbName);
+      return removeValuesTemporaryTables(db.getAllTables(dbName));
     } else {
       return db.getTablesByPattern(dbName, tblPattern);
     }
   }
 
+  private final static String TMP_TABLE_PREFIX =
+      SemanticAnalyzer.VALUES_TMP_TABLE_NAME_PREFIX.toLowerCase();
+
+  static Iterable<String> removeValuesTemporaryTables(List<String> tableNames) {
+    List<String> allTables = new ArrayList<>(tableNames);
+    CollectionUtils.filter(allTables, new Predicate() {
+      @Override
+      public boolean evaluate(Object tableName) {
+        return !tableName.toString().toLowerCase().startsWith(TMP_TABLE_PREFIX);
+      }
+    });
+    return allTables;
+  }
+
   private Iterable<? extends String> matchesDb(String dbPattern) throws HiveException {
     if (dbPattern == null) {
       return db.getAllDatabases();

http://git-wip-us.apache.org/repos/asf/hive/blob/4e162e01/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
index 1396a94..80865bd 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestReplicationSemanticAnalyzer.java
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hive.ql.parse;
 
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.hasItems;
+import static org.hamcrest.CoreMatchers.is;
 import static org.junit.Assert.*;
 
 import java.io.Serializable;
@@ -24,8 +27,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 
-import junit.framework.Assert;
-
+import com.google.common.collect.ImmutableList;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
@@ -52,7 +54,6 @@ public class TestReplicationSemanticAnalyzer {
   ParseDriver pd;
   SemanticAnalyzer sA;
 
-
   @BeforeClass
   public static void initialize() throws HiveException {
     queryState = new QueryState(new HiveConf(SemanticAnalyzer.class));
@@ -272,4 +273,19 @@ public class TestReplicationSemanticAnalyzer {
     FetchTask fetchTask = rs.getFetchTask();
     assertNotNull(fetchTask);
   }
+
+  @Test
+  public void removeTemporaryTablesForMetadataDump() {
+    List<String> validTables = ImmutableList.copyOf(
+        ReplicationSemanticAnalyzer.removeValuesTemporaryTables(new ArrayList<String>() {{
+          add(SemanticAnalyzer.VALUES_TMP_TABLE_NAME_PREFIX + "a");
+          add(SemanticAnalyzer.VALUES_TMP_TABLE_NAME_PREFIX + "b");
+          add(SemanticAnalyzer.VALUES_TMP_TABLE_NAME_PREFIX + "c");
+          add("c");
+          add("b");
+          add("a");
+        }}));
+    assertThat(validTables.size(), is(equalTo(3)));
+    assertThat(validTables, hasItems("a", "b", "c"));
+  }
 }
