http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
index 52c105f..98da309 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
@@ -36,10 +36,7 @@ public class DDLWork implements Serializable {
   // TODO: this can probably be replaced with much less code via dynamic dispatch and/or templates.
   private PreInsertTableDesc preInsertTableDesc;
   private InsertTableDesc insertTableDesc;
-  private CreateIndexDesc createIndexDesc;
-  private AlterIndexDesc alterIndexDesc;
   private AlterMaterializedViewDesc alterMVDesc;
-  private DropIndexDesc dropIdxDesc;
   private CreateDatabaseDesc createDatabaseDesc;
   private SwitchDatabaseDesc switchDatabaseDesc;
   private DropDatabaseDesc dropDatabaseDesc;
@@ -71,7 +68,6 @@ public class DDLWork implements Serializable {
   private AlterTableSimpleDesc alterTblSimpleDesc;
   private MsckDesc msckDesc;
   private ShowTableStatusDesc showTblStatusDesc;
-  private ShowIndexesDesc showIndexesDesc;
   private DescDatabaseDesc descDbDesc;
   private AlterDatabaseDesc alterDbDesc;
   private AlterTableAlterPartDesc alterTableAlterPartDesc;
@@ -125,16 +121,6 @@ public class DDLWork implements Serializable {
     this.outputs = outputs;
   }
 
-  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
-      CreateIndexDesc createIndex) {
-    this(inputs, outputs);
-    this.createIndexDesc = createIndex;
-  }
-
-  public DDLWork(AlterIndexDesc alterIndex) {
-    this.alterIndexDesc = alterIndex;
-  }
-
   /**
    * @param createDatabaseDesc
    *          Create Database descriptor
@@ -209,16 +195,6 @@ public class DDLWork implements Serializable {
   }
 
   /**
-   * @param alterIdxDesc
-   *          alter index descriptor
-   */
-  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
-      AlterIndexDesc alterIndexDesc) {
-    this(inputs, outputs);
-    this.alterIndexDesc = alterIndexDesc;
-  }
-
-  /**
    * @param alterMVDesc
    *          alter materialized view descriptor
    */
@@ -492,12 +468,6 @@ public class DDLWork implements Serializable {
   }
 
   public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
-      DropIndexDesc dropIndexDesc) {
-    this(inputs, outputs);
-    this.dropIdxDesc = dropIndexDesc;
-  }
-
-  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
       RoleDDLDesc roleDDLDesc) {
     this(inputs, outputs);
     this.roleDDLDesc = roleDDLDesc;
@@ -528,12 +498,6 @@ public class DDLWork implements Serializable {
   }
 
   public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
-      ShowIndexesDesc showIndexesDesc) {
-    this(inputs, outputs);
-    this.showIndexesDesc = showIndexesDesc;
-  }
-
-  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
       AlterTablePartMergeFilesDesc mergeDesc) {
     this(inputs, outputs);
     this.mergeFilesDesc = mergeDesc;
@@ -725,36 +689,6 @@ public class DDLWork implements Serializable {
   }
 
   /**
-   * @return the createIndexDesc
-   */
-  public CreateIndexDesc getCreateIndexDesc() {
-    return createIndexDesc;
-  }
-
-  /**
-   * @param createIndexDesc
-   *          the createIndexDesc to set
-   */
-  public void setCreateIndexDesc(CreateIndexDesc createIndexDesc) {
-    this.createIndexDesc = createIndexDesc;
-  }
-
-  /**
-   * @return the alterIndexDesc
-   */
-  public AlterIndexDesc getAlterIndexDesc() {
-    return alterIndexDesc;
-  }
-
-  /**
-   * @param alterIndexDesc
-   *          the alterIndexDesc to set
-   */
-  public void setAlterIndexDesc(AlterIndexDesc alterIndexDesc) {
-    this.alterIndexDesc = alterIndexDesc;
-  }
-
-  /**
    * @return the createTblDesc
    */
   @Explain(displayName = "Create Table Operator", explainLevels = { 
Level.USER, Level.DEFAULT, Level.EXTENDED })
@@ -1041,18 +975,6 @@ public class DDLWork implements Serializable {
   }
 
   /**
-   * @return the showIndexesDesc
-   */
-  @Explain(displayName = "Show Index Operator", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
-  public ShowIndexesDesc getShowIndexesDesc() {
-    return showIndexesDesc;
-  }
-
-  public void setShowIndexesDesc(ShowIndexesDesc showIndexesDesc) {
-    this.showIndexesDesc = showIndexesDesc;
-  }
-
-  /**
    * @return the descTblDesc
    */
   @Explain(displayName = "Describe Table Operator", explainLevels = { 
Level.USER, Level.DEFAULT, Level.EXTENDED })
@@ -1172,14 +1094,6 @@ public class DDLWork implements Serializable {
     this.outputs = outputs;
   }
 
-  public DropIndexDesc getDropIdxDesc() {
-    return dropIdxDesc;
-  }
-
-  public void setDropIdxDesc(DropIndexDesc dropIdxDesc) {
-    this.dropIdxDesc = dropIdxDesc;
-  }
-
   /**
    * @return role ddl desc
    */
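
A side note on the TODO near the top of this file's diff ("this can probably
be replaced with much less code via dynamic dispatch and/or templates"): the
descriptor fields, constructors, and accessors deleted here are exactly the
boilerplate that note is about. A minimal sketch of the dynamic-dispatch idea
follows; all names are hypothetical, this is not Hive's actual API:

import java.io.Serializable;
import java.util.HashSet;

// Hypothetical sketch only: each descriptor knows how to execute itself, so
// a DDLWork-like class needs a single polymorphic field instead of one
// field/constructor/getter/setter group per DDL operation.
interface DdlDescSketch extends Serializable {
  void execute();
}

class DdlWorkSketch implements Serializable {
  private final HashSet<String> inputs;   // stand-in for HashSet<ReadEntity>
  private final HashSet<String> outputs;  // stand-in for HashSet<WriteEntity>
  private final DdlDescSketch desc;       // one slot replaces dozens of fields

  DdlWorkSketch(HashSet<String> inputs, HashSet<String> outputs, DdlDescSketch desc) {
    this.inputs = inputs;
    this.outputs = outputs;
    this.desc = desc;
  }

  void run() {
    desc.execute();  // dynamic dispatch replaces per-type null checks
  }
}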

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/plan/DropIndexDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropIndexDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DropIndexDesc.java
deleted file mode 100644
index 58ac328..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropIndexDesc.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.plan;
-
-public class DropIndexDesc {
-  
-  private static final long serialVersionUID = 1L;
-  
-  private String indexName;
-  
-  private String tableName;
-
-  private boolean throwException;
-  
-  /**
-   * @param indexName
-   * @param tableName
-   */
-  public DropIndexDesc(String indexName, String tableName, boolean throwException) {
-    this.indexName = indexName;
-    this.tableName = tableName;
-    this.throwException = throwException;
-  }
-
-  /**
-   * @return index name
-   */
-  public String getIndexName() {
-    return indexName;
-  }
-
-  /**
-   * @param indexName index name
-   */
-  public void setIndexName(String indexName) {
-    this.indexName = indexName;
-  }
-
-  /**
-   * @return table name
-   */
-  public String getTableName() {
-    return tableName;
-  }
-
-  /**
-   * @param tableName table name
-   */
-  public void setTableName(String tableName) {
-    this.tableName = tableName;
-  }
-
-  public boolean isThrowException() {
-    return throwException;
-  }
-
-  public void setThrowException(boolean throwException) {
-    this.throwException = throwException;
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
index 3938bd5..a9e5c8c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
@@ -71,7 +71,6 @@ public enum HiveOperation {
   SHOW_CREATEDATABASE("SHOW_CREATEDATABASE", new 
Privilege[]{Privilege.SELECT}, null),
   SHOW_CREATETABLE("SHOW_CREATETABLE", new Privilege[]{Privilege.SELECT}, 
null),
   SHOWFUNCTIONS("SHOWFUNCTIONS", null, null, true, false),
-  SHOWINDEXES("SHOWINDEXES", null, null, true, false),
   SHOWPARTITIONS("SHOWPARTITIONS", null, null),
   SHOWLOCKS("SHOWLOCKS", null, null, true, false),
   SHOWCONF("SHOWCONF", null, null),
@@ -89,9 +88,6 @@ public enum HiveOperation {
   DROP_MATERIALIZED_VIEW("DROP_MATERIALIZED_VIEW", null, new 
Privilege[]{Privilege.DROP}),
   ALTER_MATERIALIZED_VIEW_REWRITE("ALTER_MATERIALIZED_VIEW_REWRITE",
       new Privilege[]{Privilege.ALTER_METADATA}, null),
-  CREATEINDEX("CREATEINDEX", null, null),
-  DROPINDEX("DROPINDEX", null, null),
-  ALTERINDEX_REBUILD("ALTERINDEX_REBUILD", null, null),
   ALTERVIEW_PROPERTIES("ALTERVIEW_PROPERTIES", null, null),
   DROPVIEW_PROPERTIES("DROPVIEW_PROPERTIES", null, null),
   LOCKTABLE("LOCKTABLE",  new Privilege[]{Privilege.LOCK}, null),
@@ -114,7 +110,6 @@ public enum HiveOperation {
   TRUNCATETABLE("TRUNCATETABLE", null, new Privilege[]{Privilege.DROP}),
   CREATETABLE_AS_SELECT("CREATETABLE_AS_SELECT", new 
Privilege[]{Privilege.SELECT}, new Privilege[]{Privilege.CREATE}),
   QUERY("QUERY", new Privilege[]{Privilege.SELECT}, new 
Privilege[]{Privilege.ALTER_DATA, Privilege.CREATE}, true, false),
-  ALTERINDEX_PROPS("ALTERINDEX_PROPS",null, null),
   ALTERDATABASE("ALTERDATABASE", null, null),
   ALTERDATABASE_OWNER("ALTERDATABASE_OWNER", null, null),
   ALTERDATABASE_LOCATION("ALTERDATABASE_LOCATION", new 
Privilege[]{Privilege.ALTER_DATA}, null),

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
index 9298630..f147309 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/MapWork.java
@@ -118,8 +118,6 @@ public class MapWork extends BaseWork {
 
   private String inputformat;
 
-  private String indexIntermediateFile;
-
   private Integer numMapTasks;
   private Long maxSplitSize;
   private Long minSplitSize;
@@ -587,10 +585,6 @@ public class MapWork extends BaseWork {
     return this.mapperCannotSpanPartns;
   }
 
-  public String getIndexIntermediateFile() {
-    return indexIntermediateFile;
-  }
-
   public ArrayList<String> getAliases() {
     return new ArrayList<String>(aliasToWork.keySet());
   }
@@ -641,14 +635,6 @@ public class MapWork extends BaseWork {
     return sortedColsByDirectory;
   }
 
-  public void addIndexIntermediateFile(String fileName) {
-    if (this.indexIntermediateFile == null) {
-      this.indexIntermediateFile = fileName;
-    } else {
-      this.indexIntermediateFile += "," + fileName;
-    }
-  }
-
   public int getSamplingType() {
     return samplingType;
   }

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowIndexesDesc.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowIndexesDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowIndexesDesc.java
deleted file mode 100644
index e18a94c..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowIndexesDesc.java
+++ /dev/null
@@ -1,75 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.plan;
-
-import java.io.Serializable;
-
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
-
-
-/**
- * ShowIndexesDesc.
- * Returns table index information per SQL syntax.
- */
-@Explain(displayName = "Show Indexes", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
-public class ShowIndexesDesc extends DDLDesc implements Serializable {
-  private static final long serialVersionUID = 1L;
-  String tableName;
-  String resFile;
-  boolean isFormatted;
-
-  /**
-   * thrift ddl for the result of show indexes.
-   */
-  private static final String schema = "idx_name,tab_name,col_names,idx_tab_name,idx_type,comment"
-                                        + "#string:string:string:string:string:string";
-
-  public static String getSchema() {
-    return schema;
-  }
-
-  public String getTableName() {
-    return tableName;
-  }
-
-  public String getResFile() {
-    return resFile;
-  }
-
-  public boolean isFormatted() {
-    return isFormatted;
-  }
-
-  public void setFormatted(boolean isFormatted) {
-    this.isFormatted = isFormatted;
-  }
-
-  /**
-   *
-   * @param tableName
-   *          Name of the table whose indexes need to be listed.
-   * @param resFile
-   *          File to store the results in.
-   */
-  public ShowIndexesDesc(String tableName, Path resFile) {
-    this.tableName = tableName;
-    this.resFile = resFile.toString();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
index 2accad3..efbd858 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ppd/OpProcFactory.java
@@ -653,6 +653,7 @@ public final class OpProcFactory {
   }
 
   public static class ReduceSinkPPD extends DefaultPPD implements NodeProcessor {
+    @Override
     public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
                           Object... nodeOutputs) throws SemanticException {
       super.process(nd, stack, procCtx, nodeOutputs);
@@ -790,7 +791,9 @@ public final class OpProcFactory {
      * @param ewi
      */
     protected void logExpr(Node nd, ExprWalkerInfo ewi) {
-      if (!LOG.isDebugEnabled()) return;
+      if (!LOG.isDebugEnabled()) {
+        return;
+      }
       for (Entry<String, List<ExprNodeDesc>> e : 
ewi.getFinalCandidates().entrySet()) {
         StringBuilder sb = new StringBuilder("Pushdown predicates of 
").append(nd.getName())
             .append(" for alias ").append(e.getKey()).append(": ");

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
index 6000590..1e9c639 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/Privilege.java
@@ -101,9 +101,6 @@ public class Privilege {
   public static Privilege DROP = new Privilege(PrivilegeType.DROP,
       PrivilegeScope.ALLSCOPE_EXCEPT_COLUMN);
 
-  public static Privilege INDEX = new Privilege(PrivilegeType.INDEX,
-      PrivilegeScope.ALLSCOPE);
-
   public static Privilege LOCK = new Privilege(PrivilegeType.LOCK,
       PrivilegeScope.ALLSCOPE_EXCEPT_COLUMN);
 

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeRegistry.java
index 3040938..27c7986 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeRegistry.java
@@ -48,7 +48,6 @@ public class PrivilegeRegistry {
     Registry.put(Privilege.ALTER_METADATA.getPriv(), Privilege.ALTER_METADATA);
     Registry.put(Privilege.CREATE.getPriv(), Privilege.CREATE);
     Registry.put(Privilege.DROP.getPriv(), Privilege.DROP);
-    Registry.put(Privilege.INDEX.getPriv(), Privilege.INDEX);
     Registry.put(Privilege.LOCK.getPriv(), Privilege.LOCK);
     Registry.put(Privilege.SELECT.getPriv(), Privilege.SELECT);
     Registry.put(Privilege.SHOW_DATABASE.getPriv(),

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeType.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeType.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeType.java
index 56b6bf6..7678e8f 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeType.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeType.java
@@ -33,7 +33,6 @@ public enum PrivilegeType {
   ALTER_METADATA(HiveParser.TOK_PRIV_ALTER_METADATA, "Alter"),
   CREATE(HiveParser.TOK_PRIV_CREATE, "Create"),
   DROP(HiveParser.TOK_PRIV_DROP, "Drop"),
-  INDEX(HiveParser.TOK_PRIV_INDEX, "Index"),
   LOCK(HiveParser.TOK_PRIV_LOCK, "Lock"),
   SELECT(HiveParser.TOK_PRIV_SELECT, "Select"),
   SHOW_DATABASE(HiveParser.TOK_PRIV_SHOW_DATABASE, "Show_Database"),

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
index d2f1716..b66d188 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/StorageBasedAuthorizationProvider.java
@@ -18,7 +18,6 @@
 
 package org.apache.hadoop.hive.ql.security.authorization;
 
-import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.security.AccessControlException;
 import java.util.ArrayList;
@@ -293,9 +292,6 @@ public class StorageBasedAuthorizationProvider extends HiveAuthorizationProvider
       return FsAction.WRITE;
     case DROP:
       return FsAction.WRITE;
-    case INDEX:
-      throw new AuthorizationException(
-          "StorageBasedAuthorizationProvider cannot handle INDEX privilege");
     case LOCK:
       throw new AuthorizationException(
           "StorageBasedAuthorizationProvider cannot handle LOCK privilege");

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java
deleted file mode 100644
index aa5be09..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/AbstractGenericUDFEWAHBitmapBop.java
+++ /dev/null
@@ -1,146 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.udf.generic;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import javaewah.EWAHCompressedBitmap;
-
-import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectInput;
-import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectOutput;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.io.LongWritable;
-
-/**
- * An abstract class for a UDF that performs a binary operation between two EWAH-compressed bitmaps.
- * For example: Bitmap OR and AND operations between two EWAH-compressed bitmaps.
- */
-abstract public class AbstractGenericUDFEWAHBitmapBop extends GenericUDF {
-  protected final ArrayList<Object> ret = new ArrayList<Object>();
-  private transient ObjectInspector b1OI;
-  private final String name;
-
-  AbstractGenericUDFEWAHBitmapBop(String name) {
-    this.name = name;
-  }
-
-  @Override
-  public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
-    if (arguments.length != 2) {
-      throw new UDFArgumentLengthException(
-        "The function " + name + "(b1, b2) takes exactly 2 arguments");
-    }
-
-    if (arguments[0].getCategory().equals(Category.LIST)) {
-      b1OI = (ListObjectInspector) arguments[0];
-    } else {
-        throw new UDFArgumentTypeException(0, "\""
-          + Category.LIST.toString().toLowerCase()
-          + "\" is expected at function " + name + ", but \""
-          + arguments[0].getTypeName() + "\" is found");
-    }
-
-    if (!arguments[1].getCategory().equals(Category.LIST)) {
-        throw new UDFArgumentTypeException(1, "\""
-          + Category.LIST.toString().toLowerCase()
-          + "\" is expected at function " + name + ", but \""
-          + arguments[1].getTypeName() + "\" is found");
-
-    }
-    return ObjectInspectorFactory
-        .getStandardListObjectInspector(PrimitiveObjectInspectorFactory
-            .writableLongObjectInspector);
-  }
-
-  protected abstract EWAHCompressedBitmap bitmapBop(
-    EWAHCompressedBitmap bitmap1, EWAHCompressedBitmap bitmap2);
-
-  @Override
-  public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    assert (arguments.length == 2);
-    Object b1 = arguments[0].get();
-    Object b2 = arguments[1].get();
-
-    EWAHCompressedBitmap bitmap1 = wordArrayToBitmap(b1);
-    EWAHCompressedBitmap bitmap2 = wordArrayToBitmap(b2);
-
-    EWAHCompressedBitmap bitmapAnd = bitmapBop(bitmap1, bitmap2);
-
-    BitmapObjectOutput bitmapObjOut = new BitmapObjectOutput();
-    try {
-      bitmapAnd.writeExternal(bitmapObjOut);
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-    ret.clear();
-    List<LongWritable> retList = bitmapToWordArray(bitmapAnd);
-    for (LongWritable l : retList) {
-      ret.add(l);
-    }
-    return ret;
-  }
-  
-  protected EWAHCompressedBitmap wordArrayToBitmap(Object b) {
-    ListObjectInspector lloi = (ListObjectInspector) b1OI;
-    int length = lloi.getListLength(b);
-    ArrayList<LongWritable> bitmapArray = new ArrayList<LongWritable>();
-    for (int i = 0; i < length; i++) {
-      long l = PrimitiveObjectInspectorUtils.getLong(
-          lloi.getListElement(b, i), 
-          (PrimitiveObjectInspector) lloi.getListElementObjectInspector());
-      bitmapArray.add(new LongWritable(l));
-    }
-
-    BitmapObjectInput bitmapObjIn = new BitmapObjectInput(bitmapArray);
-    EWAHCompressedBitmap bitmap = new EWAHCompressedBitmap();
-    try {
-      bitmap.readExternal(bitmapObjIn);
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-    return bitmap;
-  }
-
-  protected List<LongWritable> bitmapToWordArray(EWAHCompressedBitmap bitmap) {
-    BitmapObjectOutput bitmapObjOut = new BitmapObjectOutput();
-    try {
-      bitmap.writeExternal(bitmapObjOut);
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-    return bitmapObjOut.list();
-  }
-  
-  @Override
-  public String getDisplayString(String[] children) {
-    return getStandardDisplayString(name, children, ",");
-  }
-}
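
The class deleted above is a thin wrapper around the javaewah library. For
reference, here is a minimal standalone sketch of the AND/OR operations the
UDFs delegated to, assuming only the javaewah jar on the classpath; the class
name is invented, the library calls are the same ones used above:

import java.util.Iterator;

import javaewah.EWAHCompressedBitmap;

public class EwahBopSketch {
  public static void main(String[] args) {
    EWAHCompressedBitmap b1 = new EWAHCompressedBitmap();
    EWAHCompressedBitmap b2 = new EWAHCompressedBitmap();
    // set() requires strictly increasing positions and returns false when
    // bits are set out of order (the UDAF below relies on that contract).
    b1.set(1); b1.set(5); b1.set(9);
    b2.set(5); b2.set(9); b2.set(12);

    EWAHCompressedBitmap and = b1.and(b2); // bits set in both: 5, 9
    EWAHCompressedBitmap or = b1.or(b2);   // bits set in either: 1, 5, 9, 12

    for (Iterator<Integer> it = and.iterator(); it.hasNext();) {
      System.out.println("and: " + it.next());
    }
    for (Iterator<Integer> it = or.iterator(); it.hasNext();) {
      System.out.println("or: " + it.next());
    }
  }
}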

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java
deleted file mode 100644
index fabeecc..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDAFEWAHBitmap.java
+++ /dev/null
@@ -1,193 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.udf.generic;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-import javaewah.EWAHCompressedBitmap;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectInput;
-import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectOutput;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.StandardListObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.util.StringUtils;
-
-
-/**
- * GenericUDAFEWAHBitmap.
- *
- */
-@Description(name = "ewah_bitmap", value = "_FUNC_(expr) - Returns an 
EWAH-compressed bitmap representation of a column.")
-public class GenericUDAFEWAHBitmap extends AbstractGenericUDAFResolver {
-
-  static final Logger LOG = LoggerFactory.getLogger(GenericUDAFEWAHBitmap.class.getName());
-
-  @Override
-  public GenericUDAFEvaluator getEvaluator(TypeInfo[] parameters)
-    throws SemanticException {
-    if (parameters.length != 1) {
-      throw new UDFArgumentTypeException(parameters.length - 1,
-          "Exactly one argument is expected.");
-    }
-    ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(parameters[0]);
-    if (!ObjectInspectorUtils.compareSupported(oi)) {
-      throw new UDFArgumentTypeException(parameters.length - 1,
-          "Cannot support comparison of map<> type or complex type containing 
map<>.");
-    }
-    return new GenericUDAFEWAHBitmapEvaluator();
-  }
-
-  //The UDAF evaluator assumes that all rows it's evaluating have
-  //the same (desired) value.
-  public static class GenericUDAFEWAHBitmapEvaluator extends GenericUDAFEvaluator {
-
-    // For PARTIAL1 and COMPLETE: ObjectInspectors for original data
-    private PrimitiveObjectInspector inputOI;
-
-    // For PARTIAL2 and FINAL: ObjectInspectors for partial aggregations
-    // (lists of bitmaps)
-    private transient StandardListObjectInspector loi;
-    private transient StandardListObjectInspector internalMergeOI;
-
-    @Override
-    public ObjectInspector init(Mode m, ObjectInspector[] parameters)
-        throws HiveException {
-      super.init(m, parameters);
-      // init output object inspectors
-      // The output of a partial aggregation is a list
-      if (m == Mode.PARTIAL1) {
-        inputOI = (PrimitiveObjectInspector) parameters[0];
-        return ObjectInspectorFactory
-            .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
-      } else if (m == Mode.PARTIAL2 || m == Mode.FINAL) {
-        internalMergeOI = (StandardListObjectInspector) parameters[0];
-        inputOI = (PrimitiveObjectInspector)internalMergeOI.getListElementObjectInspector();
-        loi = (StandardListObjectInspector) ObjectInspectorFactory
-            .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
-        return loi;
-      } else { // Mode.COMPLETE, ie. no map-side aggregation, requires ordering
-        inputOI = (PrimitiveObjectInspector)parameters[0];
-        loi = (StandardListObjectInspector) ObjectInspectorFactory
-            .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableLongObjectInspector);
-        return loi;
-      }
-    }
-
-    /** class for storing the current partial result aggregation */
-    @AggregationType(estimable = true)
-    static class BitmapAgg extends AbstractAggregationBuffer {
-      EWAHCompressedBitmap bitmap;
-      @Override
-      public int estimate() {
-        return bitmap.sizeInBytes();
-      }
-    }
-
-    @Override
-    public void reset(AggregationBuffer agg) throws HiveException {
-
-        ((BitmapAgg) agg).bitmap = new EWAHCompressedBitmap();
-    }
-
-    @Override
-    public AggregationBuffer getNewAggregationBuffer() throws HiveException {
-      BitmapAgg result = new BitmapAgg();
-      reset(result);
-      return result;
-    }
-
-    @Override
-    public void iterate(AggregationBuffer agg, Object[] parameters)
-        throws HiveException {
-        assert (parameters.length == 1);
-        Object p = parameters[0];
-        if (p != null) {
-            BitmapAgg myagg = (BitmapAgg) agg;
-            try {
-                int row = PrimitiveObjectInspectorUtils.getInt(p, inputOI);
-                addBitmap(row, myagg);
-            } catch (NumberFormatException e) {
-                LOG.warn(getClass().getSimpleName() + " " +
-                        StringUtils.stringifyException(e));
-            }
-        }
-    }
-
-
-    @Override
-    public Object terminate(AggregationBuffer agg) throws HiveException {
-        BitmapAgg myagg = (BitmapAgg) agg;
-
-        BitmapObjectOutput bitmapObjOut = new BitmapObjectOutput();
-        try {
-          myagg.bitmap.writeExternal(bitmapObjOut);
-        } catch (IOException e) {
-          throw new RuntimeException(e);
-        }
-        return bitmapObjOut.list();
-    }
-
-    @Override
-    public void merge(AggregationBuffer agg, Object partial)
-        throws HiveException {
-      BitmapAgg myagg = (BitmapAgg) agg;
-      ArrayList<LongWritable> partialResult = (ArrayList<LongWritable>) 
internalMergeOI.getList(partial);
-      BitmapObjectInput bitmapObjIn = new BitmapObjectInput(partialResult);
-      EWAHCompressedBitmap partialBitmap = new EWAHCompressedBitmap();
-      try {
-        partialBitmap.readExternal(bitmapObjIn);
-      } catch (IOException e) {
-        throw new RuntimeException(e);
-      }
-      myagg.bitmap = myagg.bitmap.or(partialBitmap);
-    }
-
-    @Override
-    public Object terminatePartial(AggregationBuffer agg) throws HiveException {
-      BitmapAgg myagg = (BitmapAgg) agg;
-      BitmapObjectOutput bitmapObjOut = new BitmapObjectOutput();
-      try {
-        myagg.bitmap.writeExternal(bitmapObjOut);
-      } catch (IOException e) {
-        throw new RuntimeException(e);
-      }
-      return bitmapObjOut.list();
-    }
-
-    private void addBitmap(int newRow, BitmapAgg myagg) {
-        if (!myagg.bitmap.set(newRow)) {
-          throw new RuntimeException("Can't set bits out of order with 
EWAHCompressedBitmap");
-        }
-    }
-  }
-}
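
The least obvious part of the evaluator deleted above is the word-array round
trip between terminatePartial() and merge(). The sketch below replays that
round trip with the same (also removed) BitmapObjectOutput/BitmapObjectInput
helpers, used exactly as in the code above; only the class and variable names
are invented:

import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;

import javaewah.EWAHCompressedBitmap;

import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectInput;
import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectOutput;
import org.apache.hadoop.io.LongWritable;

public class BitmapRoundTripSketch {
  public static void main(String[] args) throws IOException {
    EWAHCompressedBitmap original = new EWAHCompressedBitmap();
    original.set(3);
    original.set(17);

    // terminatePartial(): externalize the bitmap into a word list.
    BitmapObjectOutput out = new BitmapObjectOutput();
    original.writeExternal(out);
    List<LongWritable> words = out.list();

    // merge(): rebuild the bitmap from the word list on the merging side.
    BitmapObjectInput in = new BitmapObjectInput(new ArrayList<>(words));
    EWAHCompressedBitmap restored = new EWAHCompressedBitmap();
    restored.readExternal(in);

    // Expect the original positions back: 3, 17.
    for (Iterator<Integer> it = restored.iterator(); it.hasNext();) {
      System.out.println(it.next());
    }
  }
}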

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java
deleted file mode 100644
index 976fa18..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapAnd.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.udf.generic;
-
-import javaewah.EWAHCompressedBitmap;
-
-import org.apache.hadoop.hive.ql.exec.Description;
-
-/**
- * GenericEWAHUDFBitmapAnd.
- *
- */
-@Description(name = "ewah_bitmap_and",
-  value = "_FUNC_(b1, b2) - Return an EWAH-compressed bitmap that is the 
bitwise AND of two bitmaps.")
-public class GenericUDFEWAHBitmapAnd extends AbstractGenericUDFEWAHBitmapBop {
-
-  public GenericUDFEWAHBitmapAnd() {
-    super("EWAH_BITMAP_AND");
-  }
-
-  @Override
-  protected EWAHCompressedBitmap bitmapBop(
-      EWAHCompressedBitmap bitmap1, EWAHCompressedBitmap bitmap2) {
-    return bitmap1.and(bitmap2);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java
deleted file mode 100644
index aab6e82..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapEmpty.java
+++ /dev/null
@@ -1,100 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.udf.generic;
-
-import java.io.IOException;
-import java.util.ArrayList;
-
-import javaewah.EWAHCompressedBitmap;
-
-import org.apache.hadoop.hive.ql.exec.Description;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
-import org.apache.hadoop.hive.ql.index.bitmap.BitmapObjectInput;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.io.BooleanWritable;
-import org.apache.hadoop.io.LongWritable;
-
-@Description(name = "ewah_bitmap_empty", value = "_FUNC_(bitmap) - "
-    + "Predicate that tests whether an EWAH-compressed bitmap is all zeros ")
-public class GenericUDFEWAHBitmapEmpty extends GenericUDF {
-  private transient ObjectInspector bitmapOI;
-  private transient BooleanObjectInspector boolOI;
-
-@Override
-public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
-  if (arguments.length != 1) {
-    throw new UDFArgumentLengthException(
-      "The function EWAH_BITMAP_EMPTY(b) takes exactly 1 argument");
-  }
-
-  if (arguments[0].getCategory().equals(Category.LIST)) {
-    bitmapOI = (ListObjectInspector) arguments[0];
-  } else {
-      throw new UDFArgumentTypeException(0, "\""
-        + Category.LIST.toString().toLowerCase()
-        + "\" is expected at function EWAH_BITMAP_EMPTY, but \""
-        + arguments[0].getTypeName() + "\" is found");
-  }
-
-  boolOI = PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
-  return boolOI;
-  }
-
-  @Override
-  public Object evaluate(DeferredObject[] arguments) throws HiveException {
-    assert (arguments.length == 1);
-    Object b = arguments[0].get();
-
-    ListObjectInspector lloi = (ListObjectInspector) bitmapOI;
-    int length = lloi.getListLength(b);
-    ArrayList<LongWritable> bitmapArray = new ArrayList<LongWritable>();
-    for (int i = 0; i < length; i++) {
-      long l = PrimitiveObjectInspectorUtils.getLong(
-          lloi.getListElement(b, i),
-          (PrimitiveObjectInspector) lloi.getListElementObjectInspector());
-      bitmapArray.add(new LongWritable(l));
-    }
-
-    BitmapObjectInput bitmapObjIn = new BitmapObjectInput(bitmapArray);
-    EWAHCompressedBitmap bitmap = new EWAHCompressedBitmap();
-    try {
-      bitmap.readExternal(bitmapObjIn);
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-
-     // Add return true only if bitmap is all zeros.
-     return new BooleanWritable(!bitmap.iterator().hasNext());
-  }
-
-
-  @Override
-  public String getDisplayString(String[] children) {
-    return getStandardDisplayString("EWAH_BITMAP_EMPTY", children);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java
deleted file mode 100644
index 33d6be6..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFEWAHBitmapOr.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.udf.generic;
-
-import javaewah.EWAHCompressedBitmap;
-
-import org.apache.hadoop.hive.ql.exec.Description;
-
-/**
- * GenericUDFEWAHBitmapOr.
- *
- */
-@Description(name = "ewah_bitmap_or",
-  value = "_FUNC_(b1, b2) - Return an EWAH-compressed bitmap that is the 
bitwise OR of two bitmaps.")
-public class GenericUDFEWAHBitmapOr extends AbstractGenericUDFEWAHBitmapBop {
-
-  public GenericUDFEWAHBitmapOr() {
-    super("EWAH_BITMAP_OR");
-  }
-
-  @Override
-  protected EWAHCompressedBitmap bitmapBop(
-      EWAHCompressedBitmap bitmap1, EWAHCompressedBitmap bitmap2) {
-    return bitmap1.or(bitmap2);
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/org/apache/hadoop/hive/ql/index/MockIndexResult.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/index/MockIndexResult.java b/ql/src/test/org/apache/hadoop/hive/ql/index/MockIndexResult.java
deleted file mode 100644
index 808cb6a..0000000
--- a/ql/src/test/org/apache/hadoop/hive/ql/index/MockIndexResult.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.index;
-
-import com.google.common.collect.ImmutableSet;
-import java.util.Collection;
-import org.apache.hadoop.hive.ql.io.HiveInputFormat.HiveInputSplit;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.mapred.FileSplit;
-
-public final class MockIndexResult implements IndexResult {
-
-  private final ImmutableSet<HiveInputSplit> selectedSplits;
-
-  public MockIndexResult(Collection<HiveInputSplit> selectedSplits) {
-    this.selectedSplits = ImmutableSet.copyOf(selectedSplits);
-  }
-
-  @Override
-  public boolean contains(FileSplit split) throws HiveException {
-    return selectedSplits.contains(split);
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/org/apache/hadoop/hive/ql/index/MockInputFile.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/index/MockInputFile.java b/ql/src/test/org/apache/hadoop/hive/ql/index/MockInputFile.java
index 4804e36..405efdf 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/index/MockInputFile.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/index/MockInputFile.java
@@ -74,7 +74,8 @@ public final class MockInputFile {
       DefaultSplitLengthStep {
 
     private String path;
-    private long defaultSplitSize = SplitFilterTestCase.DEFAULT_SPLIT_SIZE;;
+    public static final long DEFAULT_SPLIT_SIZE = 1024 * 1024;
+    private long defaultSplitSize = DEFAULT_SPLIT_SIZE;
     private final List<HiveInputSplit> splits = new ArrayList<>();
     private final List<HiveInputSplit> selectedSplits = new ArrayList<>();
     private long position = 0;

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/org/apache/hadoop/hive/ql/index/SplitFilterTestCase.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/index/SplitFilterTestCase.java b/ql/src/test/org/apache/hadoop/hive/ql/index/SplitFilterTestCase.java
deleted file mode 100644
index fdd0731..0000000
--- a/ql/src/test/org/apache/hadoop/hive/ql/index/SplitFilterTestCase.java
+++ /dev/null
@@ -1,153 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.index;
-
-import com.google.common.collect.ImmutableSet;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Set;
-import org.apache.hadoop.hive.ql.io.HiveInputFormat.HiveInputSplit;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.fail;
-
-public final class SplitFilterTestCase {
-  public static final long DEFAULT_SPLIT_SIZE = 1024 * 1024;
-  public static final long SMALL_SPLIT_SIZE = 500;
-
-  private final Set<HiveInputSplit> allSplits;
-  private final Set<HiveInputSplit> selectedSplits;
-  private final Set<HiveInputSplit> expectedSplits;
-  private final long maxInputSize;
-
-  private SplitFilterTestCase(Iterable<HiveInputSplit> allSplits,
-      Iterable<HiveInputSplit> selectedSplits, Iterable<HiveInputSplit> 
expectedSplits,
-      long maxInputSize) {
-
-    this.allSplits = ImmutableSet.copyOf(allSplits);
-    this.selectedSplits = ImmutableSet.copyOf(selectedSplits);
-    this.expectedSplits = ImmutableSet.copyOf(expectedSplits);
-    this.maxInputSize = maxInputSize;
-  }
-
-  private HiveInputSplit[] toArray(Collection<HiveInputSplit> splits) {
-    return splits.toArray(new HiveInputSplit[splits.size()]);
-  }
-
-  public void executeAndValidate() throws IOException {
-    SplitFilter filter = new SplitFilter(new MockIndexResult(selectedSplits), maxInputSize);
-    List<HiveInputSplit> actualSplits = filter.filter(toArray(allSplits));
-    assertSplits(expectedSplits, actualSplits);
-  }
-
-  private void assertSplits(Collection<HiveInputSplit> expectedSplits,
-      Collection<HiveInputSplit> actualSplits) {
-    SplitFilter.HiveInputSplitComparator hiveInputSplitComparator =
-        new SplitFilter.HiveInputSplitComparator();
-
-    List<HiveInputSplit> sortedExpectedSplits = new 
ArrayList<>(expectedSplits);
-    Collections.sort(sortedExpectedSplits, hiveInputSplitComparator);
-
-    List<HiveInputSplit> sortedActualSplits = new ArrayList<>(actualSplits);
-    Collections.sort(sortedActualSplits, hiveInputSplitComparator);
-
-    assertEquals("Number of selected splits.", sortedExpectedSplits.size(),
-        sortedActualSplits.size());
-
-    for (int i = 0; i < sortedExpectedSplits.size(); i++) {
-      HiveInputSplit expectedSplit = sortedExpectedSplits.get(i);
-      HiveInputSplit actualSplit = sortedActualSplits.get(i);
-
-      String splitName = "Split #" + i;
-
-      assertEquals(splitName + " path.", expectedSplit.getPath(), 
actualSplit.getPath());
-      assertEquals(splitName + " start.", expectedSplit.getStart(), 
actualSplit.getStart());
-      assertEquals(splitName + " length.", expectedSplit.getLength(), 
actualSplit.getLength());
-    }
-  }
-
-  public static MaxInputSizeStep builder() {
-    return new SplitFilterTestCaseBuilder();
-  }
-
-  public static interface MaxInputSizeStep extends InputFilesStep {
-    InputFilesStep maxInputSize(long maxInputSize);
-  }
-
-  public static interface InputFilesStep {
-    ExpectedSplitsStep inputFiles(MockInputFile... inputFiles);
-  }
-
-  public static interface ExpectedSplitsStep {
-    BuildStep expectedSplits(HiveInputSplit... expectedSplits);
-  }
-
-  public static interface BuildStep {
-    SplitFilterTestCase build();
-  }
-
-  private static final class SplitFilterTestCaseBuilder implements MaxInputSizeStep, InputFilesStep,
-      ExpectedSplitsStep, BuildStep {
-
-    private long maxInputSize = Long.MAX_VALUE;
-    private List<MockInputFile> inputFiles;
-    private List<HiveInputSplit> expectedSplits;
-
-    @Override
-    public InputFilesStep maxInputSize(long maxInputSize) {
-      this.maxInputSize = maxInputSize;
-      return this;
-    }
-
-    @Override
-    public ExpectedSplitsStep inputFiles(MockInputFile... inputFiles) {
-      this.inputFiles = Arrays.asList(inputFiles);
-      return this;
-    }
-
-    @Override
-    public BuildStep expectedSplits(HiveInputSplit... expectedSplits) {
-      this.expectedSplits = Arrays.asList(expectedSplits);
-      return this;
-    }
-
-    @Override
-    public SplitFilterTestCase build() {
-      List<HiveInputSplit> allSplits = new ArrayList<>();
-      List<HiveInputSplit> selectedSplits = new ArrayList<>();
-      Set<String> seenPaths = new HashSet<String>();
-
-      for (MockInputFile inputFile : inputFiles) {
-        if (seenPaths.add(inputFile.getPath())) {
-          allSplits.addAll(inputFile.getSplits());
-          selectedSplits.addAll(inputFile.getSelectedSplits());
-        } else {
-          fail(String.format("Cannot add 2 input files with the same path to a 
test case. " +
-              "The duplicated path is '%s'.", inputFile.getPath()));
-        }
-      }
-
-      return new SplitFilterTestCase(allSplits, selectedSplits, expectedSplits, maxInputSize);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/org/apache/hadoop/hive/ql/index/TestHiveInputSplitComparator.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/index/TestHiveInputSplitComparator.java b/ql/src/test/org/apache/hadoop/hive/ql/index/TestHiveInputSplitComparator.java
index 3fc18e9..feb5ea9 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/index/TestHiveInputSplitComparator.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/index/TestHiveInputSplitComparator.java
@@ -18,8 +18,9 @@
 package org.apache.hadoop.hive.ql.index;
 
 import java.util.Arrays;
+
 import org.apache.hadoop.hive.ql.io.HiveInputFormat.HiveInputSplit;
-import org.apache.hadoop.hive.ql.index.SplitFilter.HiveInputSplitComparator;
+import org.apache.hadoop.hive.ql.io.HiveInputFormat.HiveInputSplitComparator;
 import org.junit.Test;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/org/apache/hadoop/hive/ql/index/TestIndexType.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/index/TestIndexType.java b/ql/src/test/org/apache/hadoop/hive/ql/index/TestIndexType.java
deleted file mode 100644
index befb103..0000000
--- a/ql/src/test/org/apache/hadoop/hive/ql/index/TestIndexType.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.index;
-
-import junit.framework.TestCase;
-import org.apache.hadoop.hive.ql.index.bitmap.BitmapIndexHandler;
-import org.apache.hadoop.hive.ql.index.compact.CompactIndexHandler;
-import org.junit.Test;
-
-public class TestIndexType extends TestCase {
-
-    @Test
-    public void testIndexTypeHandlers(){
-        assertEquals(HiveIndex.IndexType.AGGREGATE_TABLE.getHandlerClsName(), AggregateIndexHandler.class.getName());
-        assertEquals(HiveIndex.IndexType.BITMAP_TABLE.getHandlerClsName(), BitmapIndexHandler.class.getName());
-        assertEquals(HiveIndex.IndexType.COMPACT_SUMMARY_TABLE.getHandlerClsName(), CompactIndexHandler.class.getName());
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/org/apache/hadoop/hive/ql/index/TestSplitFilter.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/index/TestSplitFilter.java b/ql/src/test/org/apache/hadoop/hive/ql/index/TestSplitFilter.java
deleted file mode 100644
index b5114e9..0000000
--- a/ql/src/test/org/apache/hadoop/hive/ql/index/TestSplitFilter.java
+++ /dev/null
@@ -1,296 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hive.ql.index;
-
-import java.io.IOException;
-import org.junit.Test;
-
-import static org.apache.hadoop.hive.ql.index.MockHiveInputSplits.createMockSplit;
-import static org.apache.hadoop.io.SequenceFile.SYNC_INTERVAL;
-import static org.apache.hadoop.hive.ql.index.SplitFilterTestCase.DEFAULT_SPLIT_SIZE;
-import static org.apache.hadoop.hive.ql.index.SplitFilterTestCase.SMALL_SPLIT_SIZE;
-
-public class TestSplitFilter {
-  private SplitFilterTestCase testCase;
-
-  @Test
-  public void testOneSelectedSplitsInMiddle() throws Exception {
-    testCase = SplitFilterTestCase.builder()
-        .inputFiles(
-            MockInputFile.builder()
-                .path("A")
-                .split()
-                .selectedSplit()
-                .split()
-                .build()
-        )
-        .expectedSplits(
-            createMockSplit("A", DEFAULT_SPLIT_SIZE - SYNC_INTERVAL, 
DEFAULT_SPLIT_SIZE + SYNC_INTERVAL)
-        )
-        .build();
-
-    testCase.executeAndValidate();
-  }
-
-  @Test
-  public void testSelectedFirstSplit() throws Exception {
-    testCase = SplitFilterTestCase.builder()
-        .inputFiles(
-            MockInputFile.builder()
-                .path("A")
-                .selectedSplit()
-                .split()
-                .split()
-                .build()
-        )
-        .expectedSplits(
-            createMockSplit("A", 0, DEFAULT_SPLIT_SIZE)
-        )
-        .build();
-
-    testCase.executeAndValidate();
-  }
-
-  @Test
-  public void testSelectedLastSplit() throws Exception {
-    int lastSplitSize = 1234;
-
-    testCase = SplitFilterTestCase.builder()
-        .inputFiles(
-            MockInputFile.builder()
-                .path("A")
-                .split()
-                .selectedSplit(lastSplitSize)
-                .build()
-        )
-        .expectedSplits(
-            createMockSplit("A", DEFAULT_SPLIT_SIZE - SYNC_INTERVAL, 
lastSplitSize + SYNC_INTERVAL)
-        )
-        .build();
-
-    testCase.executeAndValidate();
-  }
-
-  @Test
-  public void testSelectedTwoAdjacentSplits() throws Exception {
-
-    testCase = SplitFilterTestCase.builder()
-        .inputFiles(
-            MockInputFile.builder()
-                .path("A")
-                .selectedSplit()
-                .selectedSplit()
-                .split()
-                .build()
-        )
-        .expectedSplits(
-            createMockSplit("A", 0, DEFAULT_SPLIT_SIZE),
-            createMockSplit("A", DEFAULT_SPLIT_SIZE, DEFAULT_SPLIT_SIZE)
-        )
-        .build();
-
-    testCase.executeAndValidate();
-  }
-
-  @Test
-  public void testSelectedThreeAdjacentSplits() throws Exception {
-
-    testCase = SplitFilterTestCase.builder()
-        .inputFiles(
-            MockInputFile.builder()
-                .path("A")
-                .selectedSplit()
-                .selectedSplit()
-                .selectedSplit()
-                .split()
-                .build()
-        )
-        .expectedSplits(
-            createMockSplit("A", 0, DEFAULT_SPLIT_SIZE),
-            createMockSplit("A", DEFAULT_SPLIT_SIZE, DEFAULT_SPLIT_SIZE),
-            createMockSplit("A", DEFAULT_SPLIT_SIZE * 2, DEFAULT_SPLIT_SIZE)
-        )
-        .build();
-
-    testCase.executeAndValidate();
-  }
-
-  @Test
-  public void testSelectedSplitsInTwoFiles() throws Exception {
-
-    testCase = SplitFilterTestCase.builder()
-        .inputFiles(
-            MockInputFile.builder()
-                .path("A")
-                .selectedSplit()
-                .split()
-                .build(),
-            MockInputFile.builder()
-                .path("B")
-                .selectedSplit()
-                .build()
-        )
-        .expectedSplits(
-            createMockSplit("A", 0, DEFAULT_SPLIT_SIZE),
-            createMockSplit("B", 0, DEFAULT_SPLIT_SIZE)
-        )
-        .build();
-
-    testCase.executeAndValidate();
-  }
-
-  @Test
-  public void testOverlapWithPreviousFile() throws Exception {
-
-    testCase = SplitFilterTestCase.builder()
-        .inputFiles(
-            MockInputFile.builder()
-                .path("A")
-                .selectedSplit()
-                .build(),
-            MockInputFile.builder()
-                .path("B")
-                .split()
-                .selectedSplit()
-                .build()
-        )
-        .expectedSplits(
-            createMockSplit("A", 0, DEFAULT_SPLIT_SIZE),
-            createMockSplit("B", DEFAULT_SPLIT_SIZE - SYNC_INTERVAL, 
DEFAULT_SPLIT_SIZE + SYNC_INTERVAL)
-        )
-        .build();
-
-    testCase.executeAndValidate();
-  }
-
-  @Test
-  public void testOverlapInSecondFile() throws Exception {
-
-    testCase = SplitFilterTestCase.builder()
-        .inputFiles(
-            MockInputFile.builder()
-                .path("A")
-                .selectedSplit()
-                .build(),
-            MockInputFile.builder()
-                .path("B")
-                .split()
-                .selectedSplit()
-                .selectedSplit()
-                .build()
-        )
-        .expectedSplits(
-            createMockSplit("A", 0, DEFAULT_SPLIT_SIZE),
-            createMockSplit("B", DEFAULT_SPLIT_SIZE - SYNC_INTERVAL, 
DEFAULT_SPLIT_SIZE + SYNC_INTERVAL),
-            createMockSplit("B", DEFAULT_SPLIT_SIZE * 2, DEFAULT_SPLIT_SIZE)
-        )
-        .build();
-
-    testCase.executeAndValidate();
-  }
-
-  @Test
-  public void testSmallSplitsLengthAdjustment() throws Exception {
-
-    testCase = SplitFilterTestCase.builder()
-        .inputFiles(
-            MockInputFile.builder()
-                .path("A")
-                .defaultSplitLength(SMALL_SPLIT_SIZE)
-                .split()
-                .selectedSplit()
-                .build()
-        )
-        .expectedSplits(
-            createMockSplit("A", 0, SMALL_SPLIT_SIZE * 2)
-        )
-        .build();
-
-    testCase.executeAndValidate();
-  }
-
-  @Test
-  public void testSmallSplitsOverlap() throws Exception {
-
-    testCase = SplitFilterTestCase.builder()
-        .inputFiles(
-            MockInputFile.builder()
-                .path("A")
-                .defaultSplitLength(SMALL_SPLIT_SIZE)
-                .selectedSplit()
-                .split()
-                .selectedSplit()
-                .split()
-                .selectedSplit()
-                .build()
-        )
-        .expectedSplits(
-            createMockSplit("A", 0, SMALL_SPLIT_SIZE),
-            createMockSplit("A", SMALL_SPLIT_SIZE * 2, SMALL_SPLIT_SIZE),
-            createMockSplit("A", SMALL_SPLIT_SIZE * 4, SMALL_SPLIT_SIZE)
-        )
-        .build();
-
-    testCase.executeAndValidate();
-  }
-
-  @Test
-  public void testMaxSplitsSizePositive() throws Exception {
-
-    testCase = SplitFilterTestCase.builder()
-        .maxInputSize(DEFAULT_SPLIT_SIZE * 3 + SYNC_INTERVAL * 2)
-        .inputFiles(
-            MockInputFile.builder()
-                .path("A")
-                .selectedSplit()
-                .split()
-                .selectedSplit()
-                .split()
-                .selectedSplit()
-                .build()
-        )
-        .expectedSplits(
-            createMockSplit("A", 0, DEFAULT_SPLIT_SIZE),
-            createMockSplit("A", DEFAULT_SPLIT_SIZE * 2 - SYNC_INTERVAL, 
DEFAULT_SPLIT_SIZE + SYNC_INTERVAL),
-            createMockSplit("A", DEFAULT_SPLIT_SIZE * 4 - SYNC_INTERVAL, 
DEFAULT_SPLIT_SIZE + SYNC_INTERVAL)
-        )
-        .build();
-
-    testCase.executeAndValidate();
-  }
-
-  @Test(expected = IOException.class)
-  public void testMaxSplitsSizeNegative() throws Exception {
-    testCase = SplitFilterTestCase.builder()
-        .maxInputSize(DEFAULT_SPLIT_SIZE * 3)
-        .inputFiles(
-            MockInputFile.builder()
-                .path("A")
-                .selectedSplit()
-                .split()
-                .selectedSplit()
-                .split()
-                .selectedSplit()
-                .build()
-        )
-        .expectedSplits()
-        .build();
-
-    testCase.executeAndValidate();
-  }
-}

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
index b5b478f..d982555 100755
--- a/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/metadata/TestHive.java
@@ -41,7 +41,6 @@ import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Index;
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
-import org.apache.hadoop.hive.ql.index.HiveIndex;
 import org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.stats.StatsUtils;
@@ -688,127 +687,6 @@ public class TestHive extends TestCase {
     }
   }
 
-  /**
-   * Tests creating a simple index on a simple table.
-   *
-   * @throws Throwable
-   */
-  public void testIndex() throws Throwable {
-    try{
-      // create a simple table
-      String tableName = "table_for_testindex";
-      String qTableName = Warehouse.DEFAULT_DATABASE_NAME + "." + tableName;
-      try {
-        hm.dropTable(Warehouse.DEFAULT_DATABASE_NAME, tableName);
-      } catch (HiveException e) {
-        e.printStackTrace();
-        assertTrue("Unable to drop table", false);
-      }
-
-      Table tbl = new Table(Warehouse.DEFAULT_DATABASE_NAME, tableName);
-      List<FieldSchema> fields = tbl.getCols();
-
-      fields.add(new FieldSchema("col1", serdeConstants.INT_TYPE_NAME, "int -- 
first column"));
-      fields.add(new FieldSchema("col2", serdeConstants.STRING_TYPE_NAME,
-          "string -- second column"));
-      fields.add(new FieldSchema("col3", serdeConstants.DOUBLE_TYPE_NAME,
-          "double -- thrift column"));
-      tbl.setFields(fields);
-
-      tbl.setOutputFormatClass(HiveIgnoreKeyTextOutputFormat.class);
-      tbl.setInputFormatClass(SequenceFileInputFormat.class);
-
-      // create table
-      try {
-        hm.createTable(tbl);
-      } catch (HiveException e) {
-        e.printStackTrace();
-        assertTrue("Unable to create table: " + tableName, false);
-      }
-
-      // Create a simple index
-      String indexName = "index_on_table_for_testindex";
-      String indexHandlerClass = HiveIndex.IndexType.COMPACT_SUMMARY_TABLE.getHandlerClsName();
-      List<String> indexedCols = new ArrayList<String>();
-      indexedCols.add("col1");
-      String indexTableName = "index_on_table_for_testindex_table";
-      String qIndexTableName = Warehouse.DEFAULT_DATABASE_NAME + "." + indexTableName;
-      boolean deferredRebuild = true;
-      String inputFormat = SequenceFileInputFormat.class.getName();
-      String outputFormat = SequenceFileOutputFormat.class.getName();
-      String serde = null;
-      String storageHandler = null;
-      String location = null;
-      String collItemDelim = null;
-      String fieldDelim = null;
-      String fieldEscape = null;
-      String lineDelim = null;
-      String mapKeyDelim = null;
-      String indexComment = null;
-      Map<String, String> indexProps = null;
-      Map<String, String> tableProps = null;
-      Map<String, String> serdeProps = new HashMap<String, String>();
-      hm.createIndex(qTableName, indexName, indexHandlerClass, indexedCols, qIndexTableName,
-          deferredRebuild, inputFormat, outputFormat, serde, storageHandler, location,
-          indexProps, tableProps, serdeProps, collItemDelim, fieldDelim, fieldEscape, lineDelim,
-          mapKeyDelim, indexComment);
-
-      // Retrieve and validate the index
-      Index index = null;
-      try {
-        index = hm.getIndex(tableName, indexName);
-        assertNotNull("Unable to fetch index", index);
-        index.validate();
-        assertEquals("Index names don't match for index: " + indexName, 
indexName,
-            index.getIndexName());
-        assertEquals("Table names don't match for index: " + indexName, 
tableName,
-            index.getOrigTableName());
-        assertEquals("Index table names didn't match for index: " + indexName, 
indexTableName,
-            index.getIndexTableName());
-        assertEquals("Index handler classes didn't match for index: " + 
indexName,
-            indexHandlerClass, index.getIndexHandlerClass());
-        assertEquals("Deferred rebuild didn't match for index: " + indexName, 
deferredRebuild,
-            index.isDeferredRebuild());
-
-      } catch (HiveException e) {
-        System.err.println(StringUtils.stringifyException(e));
-        assertTrue("Unable to fetch index correctly: " + indexName, false);
-      }
-
-      // Drop index
-      try {
-        hm.dropIndex(Warehouse.DEFAULT_DATABASE_NAME, tableName, indexName, false, true);
-      } catch (HiveException e) {
-        System.err.println(StringUtils.stringifyException(e));
-        assertTrue("Unable to drop index: " + indexName, false);
-      }
-
-      boolean dropIndexException = false;
-      try {
-        hm.getIndex(tableName, indexName);
-      } catch (HiveException e) {
-        // Expected since it was just dropped
-        dropIndexException = true;
-      }
-
-      assertTrue("Unable to drop index: " + indexName, dropIndexException);
-
-      // Drop table
-      try {
-        hm.dropTable(tableName);
-        Table droppedTable = hm.getTable(tableName, false);
-        assertNull("Unable to drop table " + tableName, droppedTable);
-      } catch (HiveException e) {
-        System.err.println(StringUtils.stringifyException(e));
-        assertTrue("Unable to drop table: " + tableName, false);
-      }
-    } catch (Throwable e) {
-      System.err.println(StringUtils.stringifyException(e));
-      System.err.println("testIndex failed");
-      throw e;
-    }
-  }
-
   public void testHiveRefreshOnConfChange() throws Throwable{
     Hive prevHiveObj = Hive.get();
     prevHiveObj.getDatabaseCurrent();

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
----------------------------------------------------------------------
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
index 87cd98f..4a33885 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/authorization/TestPrivilegesV1.java
@@ -66,7 +66,6 @@ public class TestPrivilegesV1 extends PrivilegesTestBase{
     grantUserTable("alter", PrivilegeType.ALTER_METADATA);
     grantUserTable("create", PrivilegeType.CREATE);
     grantUserTable("drop", PrivilegeType.DROP);
-    grantUserTable("index", PrivilegeType.INDEX);
     grantUserTable("lock", PrivilegeType.LOCK);
     grantUserTable("select", PrivilegeType.SELECT);
     grantUserTable("show_database", PrivilegeType.SHOW_DATABASE);

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientnegative/alter_concatenate_indexed_table.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/alter_concatenate_indexed_table.q b/ql/src/test/queries/clientnegative/alter_concatenate_indexed_table.q
deleted file mode 100644
index 6de8c7f..0000000
--- a/ql/src/test/queries/clientnegative/alter_concatenate_indexed_table.q
+++ /dev/null
@@ -1,18 +0,0 @@
-set hive.strict.checks.bucketing=false;
-
-set hive.exec.concatenate.check.index=true;
-create table src_rc_concatenate_test(key int, value string) stored as rcfile;
-
-load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_concatenate_test;
-load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_concatenate_test;
-load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_concatenate_test;
-
-show table extended like `src_rc_concatenate_test`;
-
-select count(1) from src_rc_concatenate_test;
-select sum(hash(key)), sum(hash(value)) from src_rc_concatenate_test;
-
-create index src_rc_concatenate_test_index on table src_rc_concatenate_test(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2");
-show indexes on src_rc_concatenate_test;
-
-alter table src_rc_concatenate_test concatenate;

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientnegative/authorization_create_index.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/authorization_create_index.q b/ql/src/test/queries/clientnegative/authorization_create_index.q
deleted file mode 100644
index eeb5673..0000000
--- a/ql/src/test/queries/clientnegative/authorization_create_index.q
+++ /dev/null
@@ -1,7 +0,0 @@
-set hive.test.authz.sstd.hs2.mode=true;
-set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
-set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
-set hive.security.authorization.enabled=true;
-create table t1 (a int);
-set user.name=user2;
-create index t1_index on table t1(a) as 'COMPACT' WITH DEFERRED REBUILD;

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientnegative/authorization_drop_index.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/authorization_drop_index.q b/ql/src/test/queries/clientnegative/authorization_drop_index.q
deleted file mode 100644
index d984d06..0000000
--- a/ql/src/test/queries/clientnegative/authorization_drop_index.q
+++ /dev/null
@@ -1,8 +0,0 @@
-set hive.test.authz.sstd.hs2.mode=true;
-set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
-set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
-set hive.security.authorization.enabled=true;
-create table t1 (a int);
-create index t1_index on table t1(a) as 'COMPACT' WITH DEFERRED REBUILD;
-set user.name=user2;
-drop index t1_index on t1;

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientnegative/authorization_invalid_priv_v2.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/authorization_invalid_priv_v2.q b/ql/src/test/queries/clientnegative/authorization_invalid_priv_v2.q
index 5d4c95e..ae28997 100644
--- a/ql/src/test/queries/clientnegative/authorization_invalid_priv_v2.q
+++ b/ql/src/test/queries/clientnegative/authorization_invalid_priv_v2.q
@@ -2,5 +2,5 @@ set hive.test.authz.sstd.hs2.mode=true;
 set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
 
 create table if not exists authorization_invalid_v2 (key int, value string);
-grant index on table authorization_invalid_v2 to user hive_test_user;
+grant lock on table authorization_invalid_v2 to user hive_test_user;
 drop table authorization_invalid_v2;

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientnegative/authorization_uri_index.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/authorization_uri_index.q b/ql/src/test/queries/clientnegative/authorization_uri_index.q
deleted file mode 100644
index 795928d..0000000
--- a/ql/src/test/queries/clientnegative/authorization_uri_index.q
+++ /dev/null
@@ -1,14 +0,0 @@
-set hive.test.authz.sstd.hs2.mode=true;
-set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
-set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
-set hive.security.authorization.enabled=true;
-
-dfs ${system:test.dfs.mkdir} ${system:test.tmp.dir}/az_uri_index;
-dfs -touchz ${system:test.tmp.dir}/az_uri_index/1.txt;
-dfs -chmod 555 ${system:test.tmp.dir}/az_uri_index/1.txt;
-
-
-create table t1(i int);
-create index idt1 on table t1 (i) as 'COMPACT' WITH DEFERRED REBUILD LOCATION '${system:test.tmp.dir}/az_uri_index/';
-
--- Attempt to use location for index that does not have permissions should fail

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientnegative/bad_indextype.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/bad_indextype.q b/ql/src/test/queries/clientnegative/bad_indextype.q
deleted file mode 100644
index 8f5bf42..0000000
--- a/ql/src/test/queries/clientnegative/bad_indextype.q
+++ /dev/null
@@ -1 +0,0 @@
-CREATE INDEX srcpart_index_proj ON TABLE srcpart(key) AS 'UNKNOWN' WITH DEFERRED REBUILD;

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientnegative/drop_index_failure.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/drop_index_failure.q b/ql/src/test/queries/clientnegative/drop_index_failure.q
deleted file mode 100644
index 6e907df..0000000
--- a/ql/src/test/queries/clientnegative/drop_index_failure.q
+++ /dev/null
@@ -1,3 +0,0 @@
-set hive.exec.drop.ignorenonexistent=false;
--- Can't use DROP INDEX if the index doesn't exist and IF EXISTS isn't specified
-DROP INDEX UnknownIndex ON src;

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientnegative/index_bitmap_no_map_aggr.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/index_bitmap_no_map_aggr.q b/ql/src/test/queries/clientnegative/index_bitmap_no_map_aggr.q
deleted file mode 100644
index a17cd1f..0000000
--- a/ql/src/test/queries/clientnegative/index_bitmap_no_map_aggr.q
+++ /dev/null
@@ -1,7 +0,0 @@
-EXPLAIN
-CREATE INDEX src1_index ON TABLE src(key) as 'BITMAP' WITH DEFERRED REBUILD;
-
-SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
-SET hive.map.aggr=false;
-CREATE INDEX src1_index ON TABLE src(key) as 'BITMAP' WITH DEFERRED REBUILD;
-ALTER INDEX src1_index ON src REBUILD;

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientnegative/index_compact_entry_limit.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/index_compact_entry_limit.q b/ql/src/test/queries/clientnegative/index_compact_entry_limit.q
deleted file mode 100644
index 63973e6..0000000
--- a/ql/src/test/queries/clientnegative/index_compact_entry_limit.q
+++ /dev/null
@@ -1,13 +0,0 @@
-set hive.mapred.mode=nonstrict;
-set hive.stats.dbclass=fs;
-drop index src_index on src;
-
-CREATE INDEX src_index ON TABLE src(key) as 'COMPACT' WITH DEFERRED REBUILD;
-ALTER INDEX src_index ON src REBUILD;
-
-SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
-INSERT OVERWRITE DIRECTORY "${system:test.tmp.dir}/index_result" SELECT `_bucketname` ,  `_offsets` FROM default__src_src_index__ WHERE key<1000;
-SET hive.index.compact.file=${system:test.tmp.dir}/index_result;
-SET hive.input.format=org.apache.hadoop.hive.ql.index.compact.HiveCompactIndexInputFormat;
-SET hive.index.compact.query.max.entries=5;
-SELECT key, value FROM src WHERE key=100 ORDER BY key;

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientnegative/index_compact_size_limit.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/index_compact_size_limit.q b/ql/src/test/queries/clientnegative/index_compact_size_limit.q
deleted file mode 100644
index ae4e265..0000000
--- a/ql/src/test/queries/clientnegative/index_compact_size_limit.q
+++ /dev/null
@@ -1,14 +0,0 @@
-set hive.mapred.mode=nonstrict;
-set hive.stats.dbclass=fs;
-drop index src_index on src;
-
-CREATE INDEX src_index ON TABLE src(key) as 'COMPACT' WITH DEFERRED REBUILD;
-ALTER INDEX src_index ON src REBUILD;
-
-SET hive.input.format=org.apache.hadoop.hive.ql.io.HiveInputFormat;
-INSERT OVERWRITE DIRECTORY "${system:test.tmp.dir}/index_result" SELECT `_bucketname` ,  `_offsets` FROM default__src_src_index__ WHERE key<1000;
-SET hive.index.compact.file=${system:test.tmp.dir}/index_result;
-SET hive.input.format=org.apache.hadoop.hive.ql.index.compact.HiveCompactIndexInputFormat;
-SET hive.index.compact.query.max.size=1024;
-SELECT key, value FROM src WHERE key=100 ORDER BY key;
-

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientnegative/merge_negative_1.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/merge_negative_1.q b/ql/src/test/queries/clientnegative/merge_negative_1.q
deleted file mode 100644
index 0a48c01..0000000
--- a/ql/src/test/queries/clientnegative/merge_negative_1.q
+++ /dev/null
@@ -1,3 +0,0 @@
-create table src2 like src;
-CREATE INDEX src_index_merge_test ON TABLE src2(key) as 'COMPACT' WITH DEFERRED REBUILD;
-alter table src2 concatenate;

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientnegative/show_create_table_index.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/show_create_table_index.q b/ql/src/test/queries/clientnegative/show_create_table_index.q
deleted file mode 100644
index 0dd0ef9..0000000
--- a/ql/src/test/queries/clientnegative/show_create_table_index.q
+++ /dev/null
@@ -1,6 +0,0 @@
-CREATE TABLE tmp_showcrt (key int, value string);
-CREATE INDEX tmp_index on table tmp_showcrt(key) as 'compact' WITH DEFERRED REBUILD;
-SHOW CREATE TABLE default__tmp_showcrt_tmp_index__;
-DROP INDEX tmp_index on tmp_showcrt;
-DROP TABLE tmp_showcrt;
-

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientnegative/temp_table_index.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/temp_table_index.q b/ql/src/test/queries/clientnegative/temp_table_index.q
deleted file mode 100644
index 91f45ce..0000000
--- a/ql/src/test/queries/clientnegative/temp_table_index.q
+++ /dev/null
@@ -1,2 +0,0 @@
-create temporary table tmp1 (c1 string);
-create index tmp1_idx on table tmp1 (c1) as 'COMPACT' with deferred rebuild;

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientnegative/truncate_column_indexed_table.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientnegative/truncate_column_indexed_table.q b/ql/src/test/queries/clientnegative/truncate_column_indexed_table.q
deleted file mode 100644
index 13f32c8..0000000
--- a/ql/src/test/queries/clientnegative/truncate_column_indexed_table.q
+++ /dev/null
@@ -1,9 +0,0 @@
--- Tests truncating a column from an indexed table
-
-CREATE TABLE test_tab (key STRING, value STRING) STORED AS RCFILE;
-
-INSERT OVERWRITE TABLE test_tab SELECT * FROM src;
-
-CREATE INDEX test_tab_index ON TABLE test_tab (key) as 'COMPACT' WITH DEFERRED REBUILD;
-
-TRUNCATE TABLE test_tab COLUMNS (value);

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientpositive/alter_concatenate_indexed_table.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/alter_concatenate_indexed_table.q b/ql/src/test/queries/clientpositive/alter_concatenate_indexed_table.q
deleted file mode 100644
index 3a9e14c..0000000
--- a/ql/src/test/queries/clientpositive/alter_concatenate_indexed_table.q
+++ /dev/null
@@ -1,51 +0,0 @@
-set hive.strict.checks.bucketing=false;
-
-set hive.mapred.mode=nonstrict;
-set hive.exec.concatenate.check.index =false;
-create table src_rc_concatenate_test(key int, value string) stored as rcfile;
-
-load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_concatenate_test;
-load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_concatenate_test;
-load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_concatenate_test;
-
-show table extended like `src_rc_concatenate_test`;
-
-select count(1) from src_rc_concatenate_test;
-select sum(hash(key)), sum(hash(value)) from src_rc_concatenate_test;
-
-create index src_rc_concatenate_test_index on table src_rc_concatenate_test(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2");
-show indexes on src_rc_concatenate_test;
-
-alter table src_rc_concatenate_test concatenate;
-
-show table extended like `src_rc_concatenate_test`;
-
-select count(1) from src_rc_concatenate_test;
-select sum(hash(key)), sum(hash(value)) from src_rc_concatenate_test;
-
-drop index src_rc_concatenate_test_index on src_rc_concatenate_test;
-
-create table src_rc_concatenate_test_part(key int, value string) partitioned by (ds string) stored as rcfile;
-
-alter table src_rc_concatenate_test_part add partition (ds='2011');
-
-load data local inpath '../../data/files/smbbucket_1.rc' into table src_rc_concatenate_test_part partition (ds='2011');
-load data local inpath '../../data/files/smbbucket_2.rc' into table src_rc_concatenate_test_part partition (ds='2011');
-load data local inpath '../../data/files/smbbucket_3.rc' into table src_rc_concatenate_test_part partition (ds='2011');
-
-show table extended like `src_rc_concatenate_test_part` partition (ds='2011');
-
-select count(1) from src_rc_concatenate_test_part;
-select sum(hash(key)), sum(hash(value)) from src_rc_concatenate_test_part;
-
-create index src_rc_concatenate_test_part_index on table src_rc_concatenate_test_part(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2");
-show indexes on src_rc_concatenate_test_part;
-
-alter table src_rc_concatenate_test_part partition (ds='2011') concatenate;
-
-show table extended like `src_rc_concatenate_test_part` partition (ds='2011');
-
-select count(1) from src_rc_concatenate_test_part;
-select sum(hash(key)), sum(hash(value)) from src_rc_concatenate_test_part;
-
-drop index src_rc_concatenate_test_part_index on src_rc_concatenate_test_part;

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientpositive/alter_index.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/alter_index.q b/ql/src/test/queries/clientpositive/alter_index.q
deleted file mode 100644
index 3a3d13c..0000000
--- a/ql/src/test/queries/clientpositive/alter_index.q
+++ /dev/null
@@ -1,11 +0,0 @@
-drop index src_index_8 on src;
-
-create index src_index_8 on table default.src(key) as 'compact' WITH DEFERRED REBUILD IDXPROPERTIES ("prop1"="val1", "prop2"="val2");
-desc extended default__src_src_index_8__;
-
-alter index src_index_8 on default.src set IDXPROPERTIES ("prop1"="val1_new", "prop3"="val3");
-desc extended default__src_src_index_8__;
-
-drop index src_index_8 on default.src;
-
-show tables;

http://git-wip-us.apache.org/repos/asf/hive/blob/b0d3cb45/ql/src/test/queries/clientpositive/authorization_index.q
----------------------------------------------------------------------
diff --git a/ql/src/test/queries/clientpositive/authorization_index.q b/ql/src/test/queries/clientpositive/authorization_index.q
deleted file mode 100644
index b8dd577..0000000
--- a/ql/src/test/queries/clientpositive/authorization_index.q
+++ /dev/null
@@ -1,13 +0,0 @@
-set hive.test.authz.sstd.hs2.mode=true;
-set hive.security.authorization.manager=org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactoryForTest;
-set hive.security.authenticator.manager=org.apache.hadoop.hive.ql.security.SessionStateConfigUserAuthenticator;
-set hive.stats.dbclass=fs;
-set hive.security.authorization.enabled=true;
-create table t1 (a int);
-create index t1_index on table t1(a) as 'COMPACT' WITH DEFERRED REBUILD;
-desc formatted default__t1_t1_index__;
-alter index t1_index on t1 rebuild;
-
-drop table t1;
-
-set hive.security.authorization.enabled=false;
