Repository: lens
Updated Branches:
  refs/heads/master d264c9a44 -> d6b121692


http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java b/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
index 1c0cd35..18c2f2c 100644
--- a/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
+++ b/lens-server/src/main/java/org/apache/lens/server/rewrite/RewriteUtil.java
@@ -107,9 +107,9 @@ public final class RewriteUtil {
     int childCount = ast.getChildCount();
     if (ast.getToken() != null) {
       if (log.isDebugEnabled() && ast.getChild(0) != null) {
-        log.debug("First child: {} Type:{}", ast.getChild(0), ((ASTNode) 
ast.getChild(0)).getToken().getType());
+        log.debug("First child: {} Type:{}", ast.getChild(0), 
ast.getChild(0).getType());
       }
-      if (ast.getToken().getType() == HiveParser.TOK_QUERY
+      if (ast.getType() == HiveParser.TOK_QUERY
         && (isCubeKeywordNode((ASTNode) ast.getChild(0)) || isFromNodeWithCubeTable((ASTNode) ast.getChild(0), conf))) {
         log.debug("Inside cube clause");
         CubeQueryInfo cqi = new CubeQueryInfo();
@@ -120,34 +120,34 @@ public final class RewriteUtil {
             cqi.startPos = ast.getCharPositionInLine();
           } else {
             ASTNode selectAST = (ASTNode) ast.getChild(1).getChild(1);
-            // Left most child of select AST will have char position just after select / select distinct
-            // Go back one "select[ distinct]"
+            // Left most child of select AST will have char position after select with
+            // no selects in between. search for select backward from there.
             cqi.startPos = getStartPos(originalQuery, HQLParser.leftMostChild(selectAST).getCharPositionInLine(),
-              "distinct");
-            cqi.startPos = getStartPos(originalQuery, cqi.startPos, "select");
+              "select");
           }
           int ci = ast.getChildIndex();
-          if (parent.getToken() == null || parent.getToken().getType() == HiveParser.TOK_EXPLAIN
-            || parent.getToken().getType() == HiveParser.TOK_CREATETABLE) {
+          if (parent.getToken() == null || parent.getType() == HiveParser.TOK_EXPLAIN
+            || parent.getType() == HiveParser.TOK_CREATETABLE) {
             // Not a sub query
             cqi.endPos = originalQuery.length();
-          } else if (parent.getChildCount() > ci + 1) {
-            if (parent.getToken().getType() == HiveParser.TOK_SUBQUERY) {
+          } else if (parent.getChildCount() > ci + 1
+            || (parent.getParent() != null && parent.getType() == parent.getParent().getType())) {
+            if (parent.getType() == HiveParser.TOK_SUBQUERY) {
               // less for the next start and for close parenthesis
               cqi.endPos = getEndPos(originalQuery, parent.getChild(ci + 1).getCharPositionInLine(), ")");
-            } else if (parent.getToken().getType() == HiveParser.TOK_UNION) {
-              // one less for the next start and less the size of string 'UNION ALL'
-              ASTNode nextChild = (ASTNode) parent.getChild(ci + 1);
-              if (isCubeKeywordNode((ASTNode) nextChild.getChild(0))) {
-                cqi.endPos = getEndPos(originalQuery, nextChild.getCharPositionInLine() - 1, "UNION ALL");
+            } else if (parent.getType() == HiveParser.TOK_UNIONALL) {
+              ASTNode nextChild;
+              if (parent.getChildCount() > ci + 1) {
+                // top level child
+                nextChild = (ASTNode) parent.getChild(ci + 1);
               } else {
-                // Go back one "union all select[ distinct]"
-                cqi.endPos = getEndPos(originalQuery, nextChild.getChild(1).getChild(1).getCharPositionInLine() - 1,
-                  "distinct");
-                cqi.endPos = getEndPos(originalQuery, cqi.endPos, "select");
-                cqi.endPos = getEndPos(originalQuery, cqi.endPos, "union all");
+                // middle child, it's left child's right child.
+                nextChild = (ASTNode) parent.getParent().getChild(parent.getChildIndex()+1);
               }
-
+              // Go back one select
+              cqi.endPos = getStartPos(originalQuery, nextChild.getChild(1).getChild(1).getCharPositionInLine() - 1,
+                "select");
+              cqi.endPos = getEndPos(originalQuery, cqi.endPos, "union all");
             } else {
               // Not expected to reach here
               log.warn("Unknown query pattern found with AST:{}", ast.dump());
@@ -159,8 +159,12 @@ public final class RewriteUtil {
             // and one for the close parenthesis if there are no more unionall
             // or one for the string 'UNION ALL' if there are more union all
             log.debug("Child of union all");
-            cqi.endPos = getEndPos(originalQuery, parent.getParent().getChild(1).getCharPositionInLine(), ")",
-              "UNION ALL");
+            cqi.endPos = parent.getParent().getChild(1).getCharPositionInLine();
+            if (cqi.endPos != 0) {
+              cqi.endPos = getEndPos(originalQuery, cqi.endPos, ")", "UNION ALL");
+            } else {
+              cqi.endPos = originalQuery.length();
+            }
           }
         }
         if (log.isDebugEnabled()) {
@@ -207,7 +211,7 @@ public final class RewriteUtil {
   }
 
   private static boolean isCubeKeywordNode(ASTNode child) {
-    return child.getToken().getType() == HiveParser.KW_CUBE;
+    return child.getType() == HiveParser.KW_CUBE;
   }
 
   /**
@@ -219,18 +223,19 @@ public final class RewriteUtil {
    * @return the end pos
    */
   private static int getEndPos(String query, int backTrackIndex, String... backTrackStr) {
-    backTrackIndex = backTrack(query, backTrackIndex, backTrackStr);
+    backTrackIndex = backTrack(query, backTrackIndex, false, backTrackStr);
     while (backTrackIndex > 0 && Character.isSpaceChar(query.charAt(backTrackIndex - 1))) {
       backTrackIndex--;
     }
     return backTrackIndex;
   }
 
-  private static int backTrack(String query, int backTrackIndex, String... backTrackStr) {
+  private static int backTrack(String query, int backTrackIndex, boolean force, String... backTrackStr) {
     if (backTrackStr != null) {
       String q = query.substring(0, backTrackIndex).toLowerCase();
+      String qTrim = q.trim();
       for (String aBackTrackStr : backTrackStr) {
-        if (q.trim().endsWith(aBackTrackStr.toLowerCase())) {
+        if ((force  && qTrim.contains(aBackTrackStr.toLowerCase()))|| qTrim.endsWith(aBackTrackStr.toLowerCase())) {
           backTrackIndex = q.lastIndexOf(aBackTrackStr.toLowerCase());
           break;
         }
@@ -248,7 +253,7 @@ public final class RewriteUtil {
    * @return the end pos
    */
   private static int getStartPos(String query, int backTrackIndex, String... backTrackStr) {
-    backTrackIndex = backTrack(query, backTrackIndex, backTrackStr);
+    backTrackIndex = backTrack(query, backTrackIndex, true, backTrackStr);
     while (backTrackIndex < query.length() && Character.isSpaceChar(query.charAt(backTrackIndex))) {
       backTrackIndex++;
     }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java b/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
index 2745b27..97d5f16 100644
--- a/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
+++ b/lens-server/src/main/java/org/apache/lens/server/session/HiveSessionService.java
@@ -38,22 +38,16 @@ import org.apache.lens.server.api.LensConfConstants;
 import org.apache.lens.server.api.error.LensException;
 import org.apache.lens.server.api.health.HealthStatus;
 import org.apache.lens.server.api.query.QueryExecutionService;
-import org.apache.lens.server.api.session.SessionClosed;
-import org.apache.lens.server.api.session.SessionExpired;
-import org.apache.lens.server.api.session.SessionOpened;
-import org.apache.lens.server.api.session.SessionRestored;
-import org.apache.lens.server.api.session.SessionService;
+import org.apache.lens.server.api.session.*;
 import org.apache.lens.server.query.QueryExecutionServiceImpl;
 import org.apache.lens.server.session.LensSessionImpl.ResourceEntry;
 
 import org.apache.commons.lang3.StringUtils;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.SystemVariables;
 import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.processors.SetProcessor;
 import org.apache.hadoop.hive.ql.session.SessionState;
-
 import org.apache.hive.service.cli.CLIService;
 import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.OperationHandle;
@@ -61,7 +55,6 @@ import org.apache.hive.service.cli.OperationHandle;
 import com.google.common.collect.Maps;
 import lombok.AccessLevel;
 import lombok.Getter;
-
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -182,17 +175,17 @@ public class HiveSessionService extends BaseLensService implements SessionServic
    * @return the session param
    */
   private String getSessionParam(Configuration sessionConf, SessionState ss, String varname) {
-    if (varname.indexOf(SetProcessor.HIVEVAR_PREFIX) == 0) {
-      String var = varname.substring(SetProcessor.HIVEVAR_PREFIX.length());
+    if (varname.indexOf(SystemVariables.HIVEVAR_PREFIX) == 0) {
+      String var = varname.substring(SystemVariables.HIVEVAR_PREFIX.length());
       if (ss.getHiveVariables().get(var) != null) {
-        return SetProcessor.HIVEVAR_PREFIX + var + "=" + ss.getHiveVariables().get(var);
+        return SystemVariables.HIVEVAR_PREFIX + var + "=" + ss.getHiveVariables().get(var);
       } else {
         throw new NotFoundException(varname + " is undefined as a hive variable");
       }
     } else {
       String var;
-      if (varname.indexOf(SetProcessor.HIVECONF_PREFIX) == 0) {
-        var = varname.substring(SetProcessor.HIVECONF_PREFIX.length());
+      if (varname.indexOf(SystemVariables.HIVECONF_PREFIX) == 0) {
+        var = varname.substring(SystemVariables.HIVECONF_PREFIX.length());
       } else {
         var = varname;
       }
@@ -275,7 +268,7 @@ public class HiveSessionService extends BaseLensService implements SessionServic
         SortedMap<String, String> sortedMap = new TreeMap<String, String>();
         sortedMap.put("silent", (ss.getIsSilent() ? "on" : "off"));
         for (String s : ss.getHiveVariables().keySet()) {
-          sortedMap.put(SetProcessor.HIVEVAR_PREFIX + s, ss.getHiveVariables().get(s));
+          sortedMap.put(SystemVariables.HIVEVAR_PREFIX + s, ss.getHiveVariables().get(s));
         }
         for (Map.Entry<String, String> entry : getSession(sessionid).getSessionConf()) {
           sortedMap.put(entry.getKey(), entry.getValue());
@@ -313,8 +306,8 @@ public class HiveSessionService extends BaseLensService implements SessionServic
       // set in session conf
       for(Map.Entry<String, String> entry: config.entrySet()) {
         String var = entry.getKey();
-        if (var.indexOf(SetProcessor.HIVECONF_PREFIX) == 0) {
-          var = var.substring(SetProcessor.HIVECONF_PREFIX.length());
+        if (var.indexOf(SystemVariables.HIVECONF_PREFIX) == 0) {
+          var = var.substring(SystemVariables.HIVECONF_PREFIX.length());
         }
         getSession(sessionid).getSessionConf().set(var, entry.getValue());
         if (addToSession) {

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java b/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
index 895a819..900770b 100644
--- a/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
+++ b/lens-server/src/main/java/org/apache/lens/server/session/LensSessionImpl.java
@@ -25,8 +25,6 @@ import java.io.ObjectOutput;
 import java.util.*;
 import java.util.concurrent.atomic.AtomicInteger;
 
-import javax.ws.rs.NotFoundException;
-
 import org.apache.lens.api.LensSessionHandle;
 import org.apache.lens.cube.metadata.CubeMetastoreClient;
 import org.apache.lens.server.LensServices;
@@ -39,10 +37,11 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.common.JavaUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hive.service.cli.HiveSQLException;
 import org.apache.hive.service.cli.SessionHandle;
 import org.apache.hive.service.cli.session.HiveSessionImpl;
-import org.apache.hive.service.cli.thrift.TProtocolVersion;
+import org.apache.hive.service.rpc.thrift.TProtocolVersion;
 
 import lombok.AccessLevel;
 import lombok.Data;
@@ -64,7 +63,7 @@ public class LensSessionImpl extends HiveSessionImpl {
 
   /** The session timeout. */
   private long sessionTimeout;
-
+  private int acquireCount = 0;
   /** The conf. */
   private Configuration conf = createDefaultConf();
 
@@ -74,7 +73,6 @@ public class LensSessionImpl extends HiveSessionImpl {
   private final Map<String, List<ResourceEntry>> failedDBResources = new HashMap<String, List<ResourceEntry>>();
 
 
-
   /**
    * Cache of database specific class loaders for this session
    * This is updated lazily on add/remove resource calls and switch database calls.
@@ -87,23 +85,24 @@ public class LensSessionImpl extends HiveSessionImpl {
 
   /**
    * Inits the persist info.
-   *
-   * @param sessionHandle the session handle
-   * @param username      the username
-   * @param password      the password
    * @param sessionConf   the session conf
    */
-  private void initPersistInfo(SessionHandle sessionHandle, String username, String password,
-    Map<String, String> sessionConf) {
-    persistInfo.setSessionHandle(new LensSessionHandle(sessionHandle.getHandleIdentifier().getPublicId(), sessionHandle
-      .getHandleIdentifier().getSecretId()));
-    persistInfo.setUsername(username);
-    persistInfo.setPassword(password);
+  private void initPersistInfo(Map<String, String> sessionConf) {
+    persistInfo.setSessionHandle(new LensSessionHandle(getSessionHandle().getHandleIdentifier().getPublicId(),
+      getSessionHandle().getHandleIdentifier().getSecretId()));
+    persistInfo.setUsername(getUserName());
+    persistInfo.setPassword(getPassword());
     persistInfo.setLastAccessTime(lastAccessTime);
     persistInfo.setSessionConf(sessionConf);
+    if (sessionConf != null) {
+      for (Map.Entry<String, String> entry : sessionConf.entrySet()) {
+        conf.set(entry.getKey(), entry.getValue());
+      }
+    }
   }
 
   private static Configuration sessionDefaultConfig;
+
   /**
    * Creates the default conf.
    *
@@ -142,20 +141,13 @@ public class LensSessionImpl extends HiveSessionImpl {
    * @param username    the username
    * @param password    the password
    * @param serverConf  the server conf
-   * @param sessionConf the session conf
    * @param ipAddress   the ip address
    */
   public LensSessionImpl(TProtocolVersion protocol, String username, String password, HiveConf serverConf,
-    Map<String, String> sessionConf, String ipAddress) {
-    super(protocol, username, password, serverConf, sessionConf, ipAddress);
-    initPersistInfo(getSessionHandle(), username, password, sessionConf);
+    String ipAddress) {
+    super(protocol, username, password, serverConf, ipAddress);
     sessionTimeout = 1000 * serverConf.getLong(LensConfConstants.SESSION_TIMEOUT_SECONDS,
       LensConfConstants.SESSION_TIMEOUT_SECONDS_DEFAULT);
-    if (sessionConf != null) {
-      for (Map.Entry<String, String> entry : sessionConf.entrySet()) {
-        conf.set(entry.getKey(), entry.getValue());
-      }
-    }
   }
 
   public Configuration getSessionConf() {
@@ -170,18 +162,22 @@ public class LensSessionImpl extends HiveSessionImpl {
    * @param username      the username
    * @param password      the password
    * @param serverConf    the server conf
-   * @param sessionConf   the session conf
    * @param ipAddress     the ip address
    */
   public LensSessionImpl(SessionHandle sessionHandle, TProtocolVersion protocol, String username, String password,
-    HiveConf serverConf, Map<String, String> sessionConf, String ipAddress) {
-    super(sessionHandle, protocol, username, password, serverConf, sessionConf, ipAddress);
-    initPersistInfo(getSessionHandle(), username, password, sessionConf);
+    HiveConf serverConf, String ipAddress) {
+    super(sessionHandle, protocol, username, password, serverConf, ipAddress);
     sessionTimeout = 1000 * serverConf.getLong(LensConfConstants.SESSION_TIMEOUT_SECONDS,
       LensConfConstants.SESSION_TIMEOUT_SECONDS_DEFAULT);
   }
 
   @Override
+  public void open(Map<String, String> sessionConfMap) throws HiveSQLException {
+    super.open(sessionConfMap);
+    initPersistInfo(sessionConfMap);
+  }
+
+  @Override
   public void close() throws HiveSQLException {
     super.close();
 
@@ -223,13 +219,12 @@ public class LensSessionImpl extends HiveSessionImpl {
    * @see org.apache.hive.service.cli.session.HiveSessionImpl#acquire()
    */
   public synchronized void acquire() {
-    try {
-      super.acquire();
-      // Update thread's class loader with current DBs class loader
-      Thread.currentThread().setContextClassLoader(getClassLoader(getCurrentDatabase()));
-    } catch (HiveSQLException e) {
-      throw new NotFoundException("Could not acquire the session", e);
-    }
+    super.acquire(true);
+    acquireCount++;
+    // Update thread's class loader with current DBs class loader
+    ClassLoader classLoader = getClassLoader(getCurrentDatabase());
+    Thread.currentThread().setContextClassLoader(classLoader);
+    SessionState.getSessionConf().setClassLoader(classLoader);
   }
 
   /*
@@ -239,7 +234,10 @@ public class LensSessionImpl extends HiveSessionImpl {
    */
   public synchronized void release() {
     lastAccessTime = System.currentTimeMillis();
-    super.release();
+    acquireCount--;
+    if (acquireCount == 0) {
+      super.release(true);
+    }
   }
 
   public boolean isActive() {
@@ -336,13 +334,14 @@ public class LensSessionImpl extends HiveSessionImpl {
             log.debug("DB resource service gave null class loader for {}", 
database);
           } else {
             if (areResourcesAdded()) {
+              log.debug("adding resources for {}", database);
               // We need to update DB specific classloader with added resources
               updateSessionDbClassLoader(database);
               classLoader = sessionDbClassLoaders.get(database);
             }
           }
-
           return classLoader == null ? getSessionState().getConf().getClassLoader() : classLoader;
+
         } catch (LensException e) {
           log.error("Error getting classloader for database {} for session {} "
             + " defaulting to session state class loader", database, 
getSessionHandle().getSessionId(), e);
@@ -475,7 +474,7 @@ public class LensSessionImpl extends HiveSessionImpl {
      * Returns the value of restoreCount for the resource
      * @return
      */
-    public int getRestoreCount(){
+    public int getRestoreCount() {
       return restoreCount.get();
     }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/main/java/org/apache/lens/server/stats/store/log/StatisticsLogPartitionHandler.java
----------------------------------------------------------------------
diff --git a/lens-server/src/main/java/org/apache/lens/server/stats/store/log/StatisticsLogPartitionHandler.java b/lens-server/src/main/java/org/apache/lens/server/stats/store/log/StatisticsLogPartitionHandler.java
index 6d92ab9..02daba7 100644
--- a/lens-server/src/main/java/org/apache/lens/server/stats/store/log/StatisticsLogPartitionHandler.java
+++ b/lens-server/src/main/java/org/apache/lens/server/stats/store/log/StatisticsLogPartitionHandler.java
@@ -128,7 +128,7 @@ public class StatisticsLogPartitionHandler extends AsyncEventListener<PartitionE
       partSpec.put("dt", key);
       Partition p = client.createPartition(t, partSpec);
       p.setLocation(finalPath.toString());
-      client.alterPartition(database, eventName, p);
+      client.alterPartition(database, eventName, p, null);
       return true;
     } catch (Exception e) {
       LOG.warn("Unable to add the partition ", e);

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java b/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
index c610ae4..b519426 100644
--- a/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
+++ b/lens-server/src/test/java/org/apache/lens/server/TestServerRestart.java
@@ -379,7 +379,7 @@ public class TestServerRestart extends LensAllApplicationJerseyTest {
     log.info("Previous query status: {}", stat.getStatusMessage());
 
     // After hive server restart, first few queries fail with Invalid Operation Handle followed by
-    // Invalid Session Handle. Idle behaviour is to fail with Invalid Session Handle immediately.
+    // Invalid Session Handle. Ideal behaviour is to fail with Invalid Session Handle immediately.
     // Jira Ticket raised for debugging: https://issues.apache.org/jira/browse/LENS-707
 
     final String query = "select COUNT(ID) from test_hive_server_restart";

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/test/java/org/apache/lens/server/common/RestAPITestUtil.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/common/RestAPITestUtil.java b/lens-server/src/test/java/org/apache/lens/server/common/RestAPITestUtil.java
index 0e39b52..837c33d 100644
--- a/lens-server/src/test/java/org/apache/lens/server/common/RestAPITestUtil.java
+++ b/lens-server/src/test/java/org/apache/lens/server/common/RestAPITestUtil.java
@@ -228,7 +228,7 @@ public class RestAPITestUtil {
   public static LensQuery waitForQueryToFinish(final WebTarget target, final LensSessionHandle lensSessionHandle,
     final QueryHandle handle, QueryStatus.Status status, MediaType mt) throws InterruptedException {
     LensQuery lensQuery = waitForQueryToFinish(target, lensSessionHandle, handle, mt);
-    assertEquals(lensQuery.getStatus().getStatus(), status);
+    assertEquals(lensQuery.getStatus().getStatus(), status, String.valueOf(lensQuery));
     return lensQuery;
   }
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
index 45f41c2..d5f5561 100644
--- a/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/metastore/TestMetastoreService.java
@@ -1923,7 +1923,7 @@ public class TestMetastoreService extends LensJerseyTest {
 
       // Do some changes to test update
       cf.alterWeight(20.0);
-      cf.alterColumn(new FieldSchema("c2", "int", "changed to int"));
+      cf.alterColumn(new FieldSchema("c2", "double", "changed to double"));
 
       XFactTable update = JAXBUtils.factTableFromCubeFactTable(cf);
       XStorageTableElement s1Tbl = createStorageTblElement("S1", table, "HOURLY");
@@ -1953,7 +1953,7 @@ public class TestMetastoreService extends LensJerseyTest {
 
       boolean foundC2 = false;
       for (FieldSchema fs : cf.getColumns()) {
-        if (fs.getName().equalsIgnoreCase("c2") && 
fs.getType().equalsIgnoreCase("int")) {
+        if (fs.getName().equalsIgnoreCase("c2") && 
fs.getType().equalsIgnoreCase("double")) {
           foundC2 = true;
           break;
         }

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
index fd6bae3..62c0280 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestQueryService.java
@@ -820,7 +820,9 @@ public class TestQueryService extends LensJerseyTest {
     if (fs.getFileStatus(actualPath).isDir()) {
       assertTrue(isDir);
       for (FileStatus fstat : fs.listStatus(actualPath)) {
-        addRowsFromFile(actualRows, fs, fstat.getPath());
+        if (!fstat.isDirectory()) {
+          addRowsFromFile(actualRows, fs, fstat.getPath());
+        }
       }
     } else {
       assertFalse(isDir);
@@ -1631,7 +1633,7 @@ public class TestQueryService extends LensJerseyTest {
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionid").build(), lensSessionId,
       mt));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("query").build(),
-      "cube sdfelect ID from cube_nonexist"));
+      "sdfelect ID from cube_nonexist"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("operation").build(), "estimate"));
     mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("conf").fileName("conf").build(), new LensConf(),
       mt));
@@ -1642,7 +1644,7 @@ public class TestQueryService extends LensJerseyTest {
 
     LensErrorTO expectedLensErrorTO = LensErrorTO.composedOf(
       LensCubeErrorCode.SYNTAX_ERROR.getLensErrorInfo().getErrorCode(),
-      "Syntax Error: line 1:5 cannot recognize input near 'sdfelect' 'ID' 
'from' in select clause",
+      "Syntax Error: line 1:0 cannot recognize input near 'sdfelect' 'ID' 
'from'",
       TestDataUtils.MOCK_STACK_TRACE);
     ErrorResponseExpectedData expectedData = new ErrorResponseExpectedData(BAD_REQUEST, expectedLensErrorTO);
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java b/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
index 2ee4eb1..cb0d858 100644
--- a/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
+++ b/lens-server/src/test/java/org/apache/lens/server/query/TestResultFormatting.java
@@ -219,7 +219,7 @@ public class TestResultFormatting extends LensJerseyTest {
       Thread.sleep(100);
     }
 
-    assertEquals(ctx.getStatus().getStatus(), status);
+    assertEquals(ctx.getStatus().getStatus(), status, String.valueOf(ctx));
 
     if (status.equals(QueryStatus.Status.SUCCESSFUL)) {
       QueryContext qctx = queryService.getQueryContext(handle);

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java b/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
index 0e640ec..83dd1f7 100644
--- a/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
+++ b/lens-server/src/test/java/org/apache/lens/server/rewrite/TestRewriting.java
@@ -39,6 +39,7 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 import org.apache.hadoop.hive.ql.parse.HiveParser;
 import org.apache.hadoop.hive.ql.parse.ParseException;
+import org.apache.hadoop.hive.ql.session.SessionState;
 
 import org.mockito.Matchers;
 import org.mockito.Mockito;
@@ -60,6 +61,7 @@ import com.codahale.metrics.MetricRegistry;
  */
 @PrepareForTest(RewriteUtil.class)
 @PowerMockIgnore({"org.apache.log4j.*", "javax.management.*", "javax.xml.*",
+  "org.apache.hadoop.*", "org.apache.hive.*", "org.antlr.*",
   "com.sun.org.apache.xerces.internal.jaxp.*", "ch.qos.logback.*", 
"org.slf4j.*", "org.w3c.dom*", "org.mockito.*"})
 public class TestRewriting {
   /**
@@ -78,7 +80,7 @@ public class TestRewriting {
   // number of successful queries through mock rewriter
   // we use this number to mock failures after successful queries
   // change the number, if more tests for success needs to be added
-  static final int NUM_SUCCESS = 63;
+  static final int NUM_SUCCESS = 40;
 
   public static CubeMetastoreClient getMockedClient() {
     CubeMetastoreClient client = Mockito.mock(CubeMetastoreClient.class);
@@ -113,6 +115,7 @@ public class TestRewriting {
         return getMockedCubeContext((ASTNode) args[0]);
       }
     });
+    Mockito.doCallRealMethod().when(mockwriter).clear();
     return mockwriter;
   }
 
@@ -188,29 +191,33 @@ public class TestRewriting {
    */
   @Test
   public void testCubeQuery() throws ParseException, LensException, HiveException {
+    SessionState.start(hconf);
     List<LensDriver> drivers = new ArrayList<LensDriver>();
     MockDriver driver = new MockDriver();
     LensConf lensConf = new LensConf();
     Configuration conf = new Configuration();
     driver.configure(conf, null, null);
     drivers.add(driver);
-
+    List<RewriteUtil.CubeQueryInfo> cubeQueries;
+    QueryContext ctx;
+    String q1, q2;
     CubeQueryRewriter mockWriter = getMockedRewriter();
     CubeMetastoreClient mockClient = getMockedClient();
     PowerMockito.stub(PowerMockito.method(RewriteUtil.class, "getCubeRewriter")).toReturn(mockWriter);
     PowerMockito.stub(PowerMockito.method(RewriteUtil.class, "getClient")).toReturn(mockClient);
-    String q1 = "select name from table";
-    List<RewriteUtil.CubeQueryInfo> cubeQueries = RewriteUtil.findCubePositions(q1, hconf);
+
+    q1 = "select name from table";
+    cubeQueries = RewriteUtil.findCubePositions(q1, hconf);
     Assert.assertEquals(cubeQueries.size(), 0);
-    QueryContext ctx = new QueryContext(q1, null, lensConf, conf, drivers);
+    ctx = new QueryContext(q1, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
     conf.set(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY, TestRewriting.class.getSimpleName());
     driver.configure(conf, null, null);
-    String q2 = "cube select name from table";
+    q2 = "select name from cube_table";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
+    Assert.assertEquals(cubeQueries.get(0).query, q2);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
     MetricRegistry reg = LensMetricsRegistry.getStaticRegistry();
@@ -238,13 +245,6 @@ public class TestRewriting {
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
-    q2 = "insert overwrite directory 'target/rewrite' cube select name from 
table";
-    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
-    Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
-    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
-    runRewrites(RewriteUtil.rewriteQuery(ctx));
-
     q2 = "insert overwrite directory 'target/rewrite' select name from 
cube_table";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
@@ -252,38 +252,24 @@ public class TestRewriting {
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
-    q2 = "insert overwrite local directory 'target/rewrite' cube select name 
from table";
+    q2 = "insert overwrite local directory 'target/example-output' select 
id,name from cube_dim_table";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
+    Assert.assertEquals(cubeQueries.get(0).query, "select id,name from cube_dim_table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
-    q2 = "insert overwrite local directory 'target/example-output' cube select 
id,name from dim_table";
+    q2 = "explain select name from cube_table";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select id,name from dim_table");
-    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
-    runRewrites(RewriteUtil.rewriteQuery(ctx));
-
-    q2 = "explain cube select name from table";
-    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
-    Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
-    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
-    runRewrites(RewriteUtil.rewriteQuery(ctx));
-
-    q2 = "select * from (cube select name from table) a";
-    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
-    Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
-    q2 = "insert overwrite directory 'target/rewrite' select * from (cube 
select name from table) a";
+    q2 = "select * from (select name from cube_table) a";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
@@ -294,17 +280,17 @@ public class TestRewriting {
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
-    q2 = "select * from (cube select name from table)a";
+    q2 = "select * from (select name from cube_table)a";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
-    q2 = "select * from  (  cube select name from table   )     a";
+    q2 = "select * from  (  select name from cube_table   )     a";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
@@ -315,15 +301,6 @@ public class TestRewriting {
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
-    q2 = "select * from (      cube select name from table where"
-      + " (name = 'ABC'||name = 'XYZ')&&(key=100)   )       a";
-    cubeQueries = RewriteUtil.findCubePositions(RewriteUtil.getReplacedQuery(q2), hconf);
-    Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from"
-      + " table where (name = 'ABC' OR name = 'XYZ') AND (key=100)");
-    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
-    runRewrites(RewriteUtil.rewriteQuery(ctx));
-
     q2 = "select * from (      select name from cube_table where"
       + " (name = 'ABC'||name = 'XYZ')&&(key=100)   )       a";
     cubeQueries = RewriteUtil.findCubePositions(RewriteUtil.getReplacedQuery(q2), hconf);
@@ -334,11 +311,11 @@ public class TestRewriting {
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
     conf.set(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY, TestRewriting.class.getSimpleName() + "-multiple");
-    q2 = "select * from (cube select name from table) a join (cube select" + " name2 from table2) b";
+    q2 = "select * from (select name from cube_table) a join (select" + " name2 from cube_table2) b";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 2);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
-    Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
+    Assert.assertEquals(cubeQueries.get(1).query, "select name2 from cube_table2");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
     reg = LensMetricsRegistry.getStaticRegistry();
@@ -350,15 +327,6 @@ public class TestRewriting {
         + "-RewriteUtil-rewriteQuery")));
     conf.unset(LensConfConstants.QUERY_METRIC_UNIQUE_ID_CONF_KEY);
 
-    q2 = "select * from (cube select name from table) a full outer join"
-      + " (cube select name2 from table2) b on a.name=b.name2";
-    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
-    Assert.assertEquals(cubeQueries.size(), 2);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
-    Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
-    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
-    runRewrites(RewriteUtil.rewriteQuery(ctx));
-
     q2 = "select * from (select name from cube_table) a full outer join"
       + " (select name2 from cube_table2) b on a.name=b.name2";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
@@ -368,28 +336,28 @@ public class TestRewriting {
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
-    q2 = "select * from (cube select name from table) a join (select name2 
from table2) b";
+    q2 = "select * from (select name from cube_table) a join (select name2 
from table2) b";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
-    q2 = "select * from (cube select name from table) a join (select name2 
from cube_table2) b";
+    q2 = "select * from (select name from cube_table) a join (select name2 
from cube_table2) b";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 2);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
     Assert.assertEquals(cubeQueries.get(1).query, "select name2 from cube_table2");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
-    q2 = "select * from (cube select name from table union all cube select 
name2 from table2) u";
+    q2 = "select * from (select name from cube_table union all select name2 
from cube_table2) u";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
     Assert.assertEquals(cubeQueries.size(), 2);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
-    Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
+    Assert.assertEquals(cubeQueries.get(1).query, "select name2 from cube_table2");
 
     q2 = "select * from (select name from cube_table union all select distinct 
name2 from cube_table2) u";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
@@ -408,87 +376,53 @@ public class TestRewriting {
     Assert.assertEquals(cubeQueries.get(1).query, "select name2 from cube_table2");
 
     q2 = "insert overwrite directory 'target/rewrite' "
-      + "select * from (cube select name from table union all cube select 
name2 from table2) u";
+      + "select * from (select name from cube_table union all select name2 
from cube_table2) u";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
     Assert.assertEquals(cubeQueries.size(), 2);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
-    Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
-
-    q2 = "insert overwrite directory 'target/rewrite' "
-      + "select * from (cube select name from table union all  select name2 
from cube_table2) u";
-    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
-    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
-    runRewrites(RewriteUtil.rewriteQuery(ctx));
-    Assert.assertEquals(cubeQueries.size(), 2);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
     Assert.assertEquals(cubeQueries.get(1).query, "select name2 from cube_table2");
 
-    q2 = "select u.* from (select name from table    union all       cube 
select name2 from table2)   u";
+    q2 = "select u.* from (select name from table    union all       select 
name2 from cube_table2)   u";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name2 from table2");
+    Assert.assertEquals(cubeQueries.get(0).query, "select name2 from cube_table2");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
-    q2 = "select u.* from (select name from table union all cube select name2 
from table2)u";
+    q2 = "select u.* from (select name from table union all select name2 from 
cube_table2)u";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name2 from table2");
+    Assert.assertEquals(cubeQueries.get(0).query, "select name2 from cube_table2");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
-    q2 = "select * from (cube select name from table union all cube select 
name2"
-      + " from table2 union all cube select name3 from table3) u";
+    q2 = "select * from (select name from cube_table union all select name2"
+      + " from cube_table2 union all select name3 from cube_table3) u";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
     Assert.assertEquals(cubeQueries.size(), 3);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
-    Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
-    Assert.assertEquals(cubeQueries.get(2).query, "cube select name3 from table3");
-
-    q2 = "select * from   (     cube select name from table    union all   
cube"
-      + " select name2 from table2   union all  cube select name3 from table3 
)  u";
-    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
-    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
-    runRewrites(RewriteUtil.rewriteQuery(ctx));
-    Assert.assertEquals(cubeQueries.size(), 3);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
-    Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
-    Assert.assertEquals(cubeQueries.get(2).query, "cube select name3 from table3");
-
-    q2 = "select * from (cube select name from table union all cube select" + 
" name2 from table2) u group by u.name";
-    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
-    Assert.assertEquals(cubeQueries.size(), 2);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
-    Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
-    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
-    runRewrites(RewriteUtil.rewriteQuery(ctx));
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
+    Assert.assertEquals(cubeQueries.get(1).query, "select name2 from cube_table2");
+    Assert.assertEquals(cubeQueries.get(2).query, "select name3 from cube_table3");
 
-    q2 = "select * from (cube select name from table union all select" + " 
name2 from cube_table2) u group by u.name";
+    q2 = "select * from (select name from cube_table union all select" + " 
name2 from cube_table2) u group by u.name";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     Assert.assertEquals(cubeQueries.size(), 2);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
     Assert.assertEquals(cubeQueries.get(1).query, "select name2 from cube_table2");
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
 
-    q2 = "select * from (cube select name from table union all cube select" + 
" name2 from table2)  u group by u.name";
+    q2 = "select * from (select name from cube_table union all select" + " 
name2 from cube_table2)  u group by u.name";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
     Assert.assertEquals(cubeQueries.size(), 2);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
-    Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
-
-    q2 = "create table temp1 as cube select name from table";
-    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
-    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
-    runRewrites(RewriteUtil.rewriteQuery(ctx));
-    Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
+    Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
+    Assert.assertEquals(cubeQueries.get(1).query, "select name2 from cube_table2");
 
     q2 = "create table temp1 as select name from cube_table";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
@@ -497,33 +431,14 @@ public class TestRewriting {
     Assert.assertEquals(cubeQueries.size(), 1);
     Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
 
-    q2 = "create table temp1 as select * from (cube select name from table 
union all cube select"
-      + " name2 from table2)  u group by u.name";
-    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
-    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
-    runRewrites(RewriteUtil.rewriteQuery(ctx));
-    Assert.assertEquals(cubeQueries.size(), 2);
-    Assert.assertEquals(cubeQueries.get(0).query, "cube select name from table");
-    Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
-
-    q2 = "create table temp1 as select * from (select name from cube_table 
union all cube select"
-      + " name2 from table2)  u group by u.name";
+    q2 = "create table temp1 as select * from (select name from cube_table 
union all select"
+      + " name2 from cube_table2)  u group by u.name";
     cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
     ctx = new QueryContext(q2, null, lensConf, conf, drivers);
     runRewrites(RewriteUtil.rewriteQuery(ctx));
     Assert.assertEquals(cubeQueries.size(), 2);
     Assert.assertEquals(cubeQueries.get(0).query, "select name from cube_table");
-    Assert.assertEquals(cubeQueries.get(1).query, "cube select name2 from table2");
-
-
-    q2 = "create table temp1 as cube select name from table where"
-      + " time_range_in('dt', '2014-06-24-23', '2014-06-25-00')";
-    cubeQueries = RewriteUtil.findCubePositions(q2, hconf);
-    ctx = new QueryContext(q2, null, lensConf, conf, drivers);
-    runRewrites(RewriteUtil.rewriteQuery(ctx));
-    Assert.assertEquals(cubeQueries.size(), 1);
-    Assert.assertEquals(cubeQueries.get(0).query,
-      "cube select name from table where time_range_in('dt', '2014-06-24-23', 
'2014-06-25-00')");
+    Assert.assertEquals(cubeQueries.get(1).query, "select name2 from 
cube_table2");
 
     q2 = "create table temp1 as select name from cube_table where"
       + " time_range_in('dt', '2014-06-24-23', '2014-06-25-00')";
@@ -534,6 +449,13 @@ public class TestRewriting {
     Assert.assertEquals(cubeQueries.get(0).query,
       "select name from cube_table where time_range_in('dt', '2014-06-24-23', 
'2014-06-25-00')");
 
+    q2 = "SELECT case when a='b' then 'c' else 'd' end some_field from 
cube_table";
+    cubeQueries = RewriteUtil.findCubePositions(q2 + " union all " + q2, 
hconf);
+
+    Assert.assertEquals(cubeQueries.size(), 2);
+    Assert.assertEquals(cubeQueries.get(0).query, q2);
+    Assert.assertEquals(cubeQueries.get(1).query, q2);
+
     // failing query for second driver
     MockDriver driver2 = new MockDriver();
     driver2.configure(conf, null, null);

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java b/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java
index 5feab71..de03da1 100644
--- a/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java
+++ b/lens-server/src/test/java/org/apache/lens/server/session/TestSessionClassLoaders.java
@@ -62,7 +62,7 @@ public class TestSessionClassLoaders {
     conf.setVar(HiveConf.ConfVars.HIVE_SESSION_IMPL_CLASSNAME, LensSessionImpl.class.getName());
     conf.set(LensConfConstants.DATABASE_RESOURCE_DIR, "target/resources");
 
-    CLIService cliService = new CLIService();
+    CLIService cliService = new CLIService(null);
     cliService.init(conf);
 
     sessionService = new HiveSessionService(cliService);

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/test/java/org/apache/lens/server/session/TestSessionExpiry.java
----------------------------------------------------------------------
diff --git a/lens-server/src/test/java/org/apache/lens/server/session/TestSessionExpiry.java b/lens-server/src/test/java/org/apache/lens/server/session/TestSessionExpiry.java
index dbdaaec..ca04d6f 100644
--- a/lens-server/src/test/java/org/apache/lens/server/session/TestSessionExpiry.java
+++ b/lens-server/src/test/java/org/apache/lens/server/session/TestSessionExpiry.java
@@ -52,7 +52,7 @@ public class TestSessionExpiry {
     HiveConf conf = LensServerConf.createHiveConf();
     conf.setVar(HiveConf.ConfVars.HIVE_SESSION_IMPL_CLASSNAME, LensSessionImpl.class.getName());
     conf.setLong(LensConfConstants.SESSION_TIMEOUT_SECONDS, 1L);
-    CLIService cliService = new CLIService();
+    CLIService cliService = new CLIService(null);
     cliService.init(conf);
     HiveSessionService lensService = new HiveSessionService(cliService);
     lensService.init(conf);
@@ -91,7 +91,7 @@ public class TestSessionExpiry {
     conf.setVar(HiveConf.ConfVars.HIVE_SESSION_IMPL_CLASSNAME, LensSessionImpl.class.getName());
     conf.setLong(LensConfConstants.SESSION_TIMEOUT_SECONDS, 1L);
     conf.setInt(LensConfConstants.SESSION_EXPIRY_SERVICE_INTERVAL_IN_SECS, 1);
-    CLIService cliService = new CLIService();
+    CLIService cliService = new CLIService(null);
     cliService.init(conf);
     HiveSessionService lensService = new HiveSessionService(cliService);
     lensService.init(conf);

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-server/src/test/resources/hive-site.xml
----------------------------------------------------------------------
diff --git a/lens-server/src/test/resources/hive-site.xml b/lens-server/src/test/resources/hive-site.xml
index 441f274..5a10ef0 100644
--- a/lens-server/src/test/resources/hive-site.xml
+++ b/lens-server/src/test/resources/hive-site.xml
@@ -38,6 +38,11 @@
   </property>
 
   <property>
+    <name>hive.exec.scratchdir</name>
+    <value>${project.build.directory}/hive/scratch</value>
+  </property>
+
+  <property>
     <name>mapreduce.framework.name</name>
     <value>local</value>
   </property>
@@ -54,6 +59,11 @@
   </property>
 
   <property>
+    <name>datanucleus.schema.autoCreateTables</name>
+    <value>true</value>
+  </property>
+
+  <property>
     <name>hive.lock.manager</name>
     <value>org.apache.hadoop.hive.ql.lockmgr.EmbeddedLockManager</value>
   </property>
@@ -68,4 +78,9 @@
     <value>target/query_logs</value>
   </property>
 
+  <property>
+    <name>hive.support.sql11.reserved.keywords</name>
+    <value>false</value>
+  </property>
+
 </configuration>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-storage-db/pom.xml
----------------------------------------------------------------------
diff --git a/lens-storage-db/pom.xml b/lens-storage-db/pom.xml
index ed58341..7fcd921 100644
--- a/lens-storage-db/pom.xml
+++ b/lens-storage-db/pom.xml
@@ -56,5 +56,13 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-client</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-shims</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.hive</groupId>
+      <artifactId>hive-metastore</artifactId>
+    </dependency>
   </dependencies>
 </project>

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/lens-storage-db/src/test/resources/hive-site.xml
----------------------------------------------------------------------
diff --git a/lens-storage-db/src/test/resources/hive-site.xml b/lens-storage-db/src/test/resources/hive-site.xml
index 2cb1d8d..9f0db84 100644
--- a/lens-storage-db/src/test/resources/hive-site.xml
+++ b/lens-storage-db/src/test/resources/hive-site.xml
@@ -36,9 +36,18 @@
   </property>
 
   <property>
+    <name>hive.exec.scratchdir</name>
+    <value>${project.build.directory}/hive/scratch</value>
+  </property>
+
+  <property>
     <name>javax.jdo.option.ConnectionURL</name>
     <value>jdbc:derby:;databaseName=target/metastore_db;create=true</value>
     <description>JDBC connect string for a JDBC metastore</description>
   </property>
 
+  <property>
+    <name>datanucleus.schema.autoCreateTables</name>
+    <value>true</value>
+  </property>
 </configuration>
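
As in lens-server, the lens-storage-db tests now run against an embedded, Derby-backed metastore whose tables DataNucleus creates on demand (datanucleus.schema.autoCreateTables), since the tests never run schematool. A minimal sketch of opening such a metastore from code, assuming the hive 2.x HiveMetaStoreClient API (the class and method names of the sketch itself are illustrative):

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;

public class EmbeddedMetastoreSketch {
  // Connect to an embedded metastore backed by a Derby database under
  // target/, as the test hive-site.xml above configures it.
  public static HiveMetaStoreClient openEmbedded() throws Exception {
    HiveConf conf = new HiveConf();
    conf.setVar(HiveConf.ConfVars.METASTORECONNECTURLKEY,
        "jdbc:derby:;databaseName=target/metastore_db;create=true");
    conf.set("datanucleus.schema.autoCreateTables", "true");
    return new HiveMetaStoreClient(conf);
  }
}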

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index ca5c45d..cb213bb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -19,7 +19,8 @@
   under the License.
 
 -->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <modelVersion>4.0.0</modelVersion>
 
   <parent>
@@ -42,9 +43,9 @@
     <javax.mail.version>1.4</javax.mail.version>
 
     <!-- hadoop stack -->
-    <hadoop.version>2.4.0</hadoop.version>
+    <hadoop.version>2.6.0</hadoop.version>
     <spark.version>1.3.0</spark.version>
-    <hive.version>0.13.3-inm</hive.version>
+    <hive.version>2.1.1-inm</hive.version>
 
     <!-- jest client for elasticsearch -->
     <jest.version>0.1.5</jest.version>
@@ -104,6 +105,12 @@
     <antrun.plugin.version>1.8</antrun.plugin.version>
     <cobertura.plugin.version>2.7</cobertura.plugin.version>
 
+    <!-- Tests -->
+    <derby.version>10.11.1.1</derby.version>
+    <datanucleus.api.jdo.version>4.2.1</datanucleus.api.jdo.version>
+    <datanucleus.javax.jdo.version>3.2.0-m3</datanucleus.javax.jdo.version>
+    <datanucleus.rdbms.version>4.1.7</datanucleus.rdbms.version>
+
     <!-- UI -->
     <nodejs.plugin.version>1.0.0</nodejs.plugin.version>
     <nodeVersion>0.10.32</nodeVersion>
@@ -1060,6 +1067,18 @@
             <groupId>commons-logging</groupId>
             <artifactId>commons-logging</artifactId>
           </exclusion>
+          <exclusion>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-exec</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-metastore</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-llap-server</artifactId>
+          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -1126,6 +1145,42 @@
             <groupId>org.mortbay.jetty</groupId>
             <artifactId>jetty</artifactId>
           </exclusion>
+          <exclusion>
+            <groupId>org.apache.hadoop</groupId>
+            <artifactId>hadoop-yarn-server-resourcemanager</artifactId>
+          </exclusion>
+        </exclusions>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.hive</groupId>
+        <artifactId>hive-service-rpc</artifactId>
+        <version>${hive.version}</version>
+        <scope>provided</scope>
+        <exclusions>
+          <exclusion>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-api</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.slf4j</groupId>
+            <artifactId>slf4j-log4j12</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>log4j</groupId>
+            <artifactId>log4j</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>commons-logging</groupId>
+            <artifactId>commons-logging</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-exec</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-metastore</artifactId>
+          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -1150,6 +1205,10 @@
             <groupId>commons-logging</groupId>
             <artifactId>commons-logging</artifactId>
           </exclusion>
+          <exclusion>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-shims</artifactId>
+          </exclusion>
         </exclusions>
       </dependency>
       <dependency>
@@ -1199,6 +1258,10 @@
             <artifactId>commons-logging</artifactId>
           </exclusion>
           <exclusion>
+            <groupId>org.apache.hive</groupId>
+            <artifactId>hive-shims</artifactId>
+          </exclusion>
+          <exclusion>
             <groupId>org.apache.avro</groupId>
             <artifactId>avro-mapred</artifactId>
           </exclusion>
@@ -1584,6 +1647,30 @@
         <artifactId>httpcore</artifactId>
         <version>${httpcore.version}</version>
       </dependency>
+      <dependency>
+        <groupId>org.datanucleus</groupId>
+        <artifactId>datanucleus-api-jdo</artifactId>
+        <version>${datanucleus.api.jdo.version}</version>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
+        <groupId>org.datanucleus</groupId>
+        <artifactId>datanucleus-rdbms</artifactId>
+        <version>${datanucleus.rdbms.version}</version>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
+        <groupId>org.apache.derby</groupId>
+        <artifactId>derby</artifactId>
+        <version>${derby.version}</version>
+        <scope>test</scope>
+      </dependency>
+      <dependency>
+        <groupId>org.datanucleus</groupId>
+        <artifactId>javax.jdo</artifactId>
+        <version>${datanucleus.javax.jdo.version}</version>
+        <scope>test</scope>
+      </dependency>
     </dependencies>
   </dependencyManagement>
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/src/site/apt/admin/deployment.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/admin/deployment.apt b/src/site/apt/admin/deployment.apt
index 0e1d775..6e0ffef 100644
--- a/src/site/apt/admin/deployment.apt
+++ b/src/site/apt/admin/deployment.apt
@@ -36,13 +36,15 @@ Lens server deployment
 
 * Dependencies and their versions
 
+Lens versions before 2.6 depend on hive 0.13.x and hadoop 2.x. From Lens 2.6 onwards, Lens depends on hive 2.1+.
+
    * Requires java 1.7+.
 
-   * Requires Hadoop 2.x+. Tested upto hadoop 2.4.
+   * Requires Hadoop 2.x+. Tested up to hadoop 2.6.
 
-   * Requires Hive metastore 0.13+
+   * Requires Hive metastore of a compatible version.
 
-   * Requires Hive server2 0.13+
+   * Requires Hive server2 of a compatible version.
 
 * Restart and recovery
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/src/site/apt/developer/contribute.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/developer/contribute.apt b/src/site/apt/developer/contribute.apt
index d1fea00..a2fd59e 100644
--- a/src/site/apt/developer/contribute.apt
+++ b/src/site/apt/developer/contribute.apt
@@ -138,7 +138,7 @@ Developer Documentation : How to contribute to Apache Lens?
 
    git checkout <release-tag>
 
-   mvn clean package -DskipTests -Phadoop-2,dist
+   mvn clean package -DskipTests -Pdist,deb
 
 +---+
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/src/site/apt/lenshome/install-and-run.apt
----------------------------------------------------------------------
diff --git a/src/site/apt/lenshome/install-and-run.apt b/src/site/apt/lenshome/install-and-run.apt
index 961d926..f133822 100644
--- a/src/site/apt/lenshome/install-and-run.apt
+++ b/src/site/apt/lenshome/install-and-run.apt
@@ -27,6 +27,7 @@ Installing and Running Lens
 
   Download a release of Apache Lens from {{{/releases/download.html}here}}.
  Lens depends on Hive forked from Apache Hive. Download a release of hive from {{{https://github.com/InMobi/hive/releases}https://github.com/InMobi/hive/releases}}.
+  Lens < 2.6 works with hive 0.13.x releases; Lens >= 2.6 works with hive 2.1+ releases.
 
 ** Installing Lens
 
@@ -67,7 +68,7 @@ Installing and Running Lens
 +---+
 
   cd hive-hive-release-<version>-inm
-  mvn clean package -DskipTests -Phadoop-2,dist
+  mvn clean package -DskipTests -Pdist,deb
 
 +---+
 

http://git-wip-us.apache.org/repos/asf/lens/blob/d6b12169/tools/conf/server/lens-site.xml
----------------------------------------------------------------------
diff --git a/tools/conf/server/lens-site.xml b/tools/conf/server/lens-site.xml
index 0803da1..b53ff25 100644
--- a/tools/conf/server/lens-site.xml
+++ b/tools/conf/server/lens-site.xml
@@ -31,4 +31,9 @@
   <name>hive.metastore.warehouse.dir</name>
   <value>/tmp/hive/warehouse</value>
 </property>
+
+<property>
+  <name>datanucleus.schema.autoCreateTables</name>
+  <value>true</value>
+</property>
 </configuration>
