http://git-wip-us.apache.org/repos/asf/lens/blob/ae83caae/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index c99fdf1..926a4d0 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -25,9 +25,6 @@ import java.util.*;
 
 import org.apache.lens.cube.metadata.*;
 import org.apache.lens.cube.metadata.ExprColumn.ExprSpec;
-import 
org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
-import org.apache.lens.cube.parse.HQLParser.ASTNodeVisitor;
-import org.apache.lens.cube.parse.HQLParser.TreeNode;
 import org.apache.lens.server.api.error.LensException;
 
 import org.apache.hadoop.hive.ql.parse.ASTNode;
@@ -58,7 +55,7 @@ class ExpressionResolver implements ContextRewriter {
     private Map<CandidateTable, Set<ExprSpecContext>> evaluableExpressions = 
new HashMap<>();
     private boolean hasMeasures = false;
 
-    public boolean hasMeasures() {
+    boolean hasMeasures() {
       return hasMeasures;
     }
 
@@ -148,11 +145,6 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     void addEvaluable(CubeQueryContext cubeql, CandidateTable cTable, 
ExprSpecContext esc) throws LensException {
-      Set<ExprSpecContext> evalSet = evaluableExpressions.get(cTable);
-      if (evalSet == null) {
-        evalSet = new LinkedHashSet<>();
-        evaluableExpressions.put(cTable, evalSet);
-      }
       // add optional dimensions involved in expressions
       for (String table : esc.getTblAliasToColumns().keySet()) {
         if (!CubeQueryContext.DEFAULT_TABLE.equalsIgnoreCase(table) && 
!srcAlias.equals(table)) {
@@ -161,7 +153,7 @@ class ExpressionResolver implements ContextRewriter {
           esc.exprDims.add((Dimension) cubeql.getCubeTableForAlias(table));
         }
       }
-      evalSet.add(esc);
+      evaluableExpressions.computeIfAbsent(cTable, k -> new 
LinkedHashSet<>()).add(esc);
     }
 
     Set<ASTNode> getAllASTNodes() {
@@ -182,13 +174,8 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     boolean isEvaluable(CandidateTable cTable) {
-      if (directlyAvailableIn.contains(cTable)) {
-        return true;
-      }
-      if (evaluableExpressions.get(cTable) == null) {
-        return false;
-      }
-      return !evaluableExpressions.get(cTable).isEmpty();
+      return directlyAvailableIn.contains(cTable)
+        || (evaluableExpressions.get(cTable) != null && 
!evaluableExpressions.get(cTable).isEmpty());
     }
   }
 
@@ -252,16 +239,16 @@ class ExpressionResolver implements ContextRewriter {
       return null;
     }
 
-    public boolean isValidInTimeRange(final TimeRange range) {
+    boolean isValidInTimeRange(final TimeRange range) {
       return isValidFrom(range.getFromDate()) && 
isValidTill(range.getToDate());
     }
 
-    public boolean isValidFrom(@NonNull final Date date) {
-      return (getStartTime() == null) ? true : date.equals(getStartTime()) || 
date.after(getStartTime());
+    boolean isValidFrom(@NonNull final Date date) {
+      return (getStartTime() == null) || (date.equals(getStartTime()) || 
date.after(getStartTime()));
     }
 
-    public boolean isValidTill(@NonNull final Date date) {
-      return (getEndTime() == null) ? true : date.equals(getEndTime()) || 
date.before(getEndTime());
+    boolean isValidTill(@NonNull final Date date) {
+      return (getEndTime() == null) || (date.equals(getEndTime()) || 
date.before(getEndTime()));
     }
 
     public String toString() {
@@ -304,13 +291,7 @@ class ExpressionResolver implements ContextRewriter {
       this.cubeql = cubeql;
     }
     void addExpressionQueried(ExpressionContext expr) {
-      String exprCol = expr.getExprCol().getName().toLowerCase();
-      Set<ExpressionContext> ecSet = allExprsQueried.get(exprCol);
-      if (ecSet == null) {
-        ecSet = new LinkedHashSet<ExpressionContext>();
-        allExprsQueried.put(exprCol, ecSet);
-      }
-      ecSet.add(expr);
+      
allExprsQueried.computeIfAbsent(expr.getExprCol().getName().toLowerCase(), k -> 
new LinkedHashSet<>()).add(expr);
     }
 
     boolean isQueriedExpression(String column) {
@@ -337,7 +318,7 @@ class ExpressionResolver implements ContextRewriter {
       throw new IllegalArgumentException("no expression available for " + expr 
+ " alias:" + alias);
     }
 
-    public boolean hasMeasures(String expr, CubeInterface cube) {
+    boolean hasMeasures(String expr, CubeInterface cube) {
       String alias = cubeql.getAliasForTableName(cube.getName());
       ExpressionContext ec = getExpressionContext(expr, alias);
       boolean hasMeasures = false;
@@ -356,7 +337,7 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     //updates all expression specs which are evaluable
-    public void updateEvaluables(String expr, CandidateTable cTable)
+    void updateEvaluables(String expr, CandidateTable cTable)
       throws LensException {
       String alias = 
cubeql.getAliasForTableName(cTable.getBaseTable().getName());
       ExpressionContext ec = getExpressionContext(expr, alias);
@@ -392,19 +373,20 @@ class ExpressionResolver implements ContextRewriter {
     }
 
     // checks if expr is evaluable
-    public boolean isEvaluable(String expr, CandidateTable cTable) {
+    boolean isEvaluable(String expr, CandidateTable cTable) {
       ExpressionContext ec = getExpressionContext(expr, 
cubeql.getAliasForTableName(cTable.getBaseTable().getName()));
       return ec.isEvaluable(cTable);
     }
 
-    public Set<Dimension> rewriteExprCtx(CubeQueryContext cubeql, 
CandidateFact cfact, Map<Dimension,
-      CandidateDim> dimsToQuery, QueryAST queryAST) throws LensException {
+    Set<Dimension> rewriteExprCtx(CubeQueryContext cubeql, StorageCandidate sc,
+        Map<Dimension, CandidateDim> dimsToQuery,
+      QueryAST queryAST) throws LensException {
       Set<Dimension> exprDims = new HashSet<Dimension>();
-      log.info("Picking expressions for fact {} ", cfact);
+      log.info("Picking expressions for candidate {} ", sc);
       if (!allExprsQueried.isEmpty()) {
         // pick expressions for fact
-        if (cfact != null) {
-          pickExpressionsForTable(cfact);
+        if (sc != null) {
+          pickExpressionsForTable(sc);
         }
         // pick expressions for dimensions
         if (dimsToQuery != null && !dimsToQuery.isEmpty()) {
@@ -412,17 +394,17 @@ class ExpressionResolver implements ContextRewriter {
             pickExpressionsForTable(cdim);
           }
         }
-        log.info("Picked expressions: {}", pickedExpressions);
+        log.debug("Picked expressions: {}", pickedExpressions);
         for (Set<PickedExpression> peSet : pickedExpressions.values()) {
           for (PickedExpression pe : peSet) {
             exprDims.addAll(pe.pickedCtx.exprDims);
             pe.initRewrittenAST(pe.pickedCtx.deNormCtx.hasReferences());
-            
exprDims.addAll(pe.pickedCtx.deNormCtx.rewriteDenormctxInExpression(cubeql, 
cfact, dimsToQuery,
+            
exprDims.addAll(pe.pickedCtx.deNormCtx.rewriteDenormctxInExpression(cubeql, sc, 
dimsToQuery,
               pe.getRewrittenAST()));
           }
         }
         // Replace picked expressions in all the base trees
-        replacePickedExpressions(cfact, queryAST);
+        replacePickedExpressions(sc, queryAST);
       }
 
       pickedExpressions.clear();
@@ -430,13 +412,11 @@ class ExpressionResolver implements ContextRewriter {
       return exprDims;
     }
 
-    private void replacePickedExpressions(CandidateFact cfact, QueryAST 
queryAST)
+    private void replacePickedExpressions(StorageCandidate sc, QueryAST 
queryAST)
       throws LensException {
       replaceAST(cubeql, queryAST.getSelectAST());
-      if (cfact != null) {
-        for (ASTNode storageWhereClauseAST : 
cfact.getStorgeWhereClauseMap().values()) {
-          replaceAST(cubeql, storageWhereClauseAST);
-        }
+      if (sc != null) {
+        replaceAST(cubeql, sc.getQueryAst().getWhereAST());
       } else {
         replaceAST(cubeql, queryAST.getWhereAST());
       }
@@ -445,7 +425,7 @@ class ExpressionResolver implements ContextRewriter {
       // Having AST is resolved by each fact, so that all facts can expand 
their expressions.
       // Having ast is not copied now, it's maintained in cubeql, each fact 
processes that serially.
       replaceAST(cubeql, cubeql.getHavingAST());
-      replaceAST(cubeql, cubeql.getOrderByAST());
+      replaceAST(cubeql, queryAST.getOrderByAST());
     }
 
     private void replaceAST(final CubeQueryContext cubeql, ASTNode node) 
throws LensException {
@@ -453,27 +433,25 @@ class ExpressionResolver implements ContextRewriter {
         return;
       }
       // Traverse the tree and resolve expression columns
-      HQLParser.bft(node, new ASTNodeVisitor() {
-        @Override
-        public void visit(TreeNode visited) throws LensException {
-          ASTNode node = visited.getNode();
-          int childcount = node.getChildCount();
-          for (int i = 0; i < childcount; i++) {
-            ASTNode current = (ASTNode) node.getChild(i);
-            if (current.getToken().getType() == DOT) {
-              // This is for the case where column name is prefixed by table 
name
-              // or table alias
-              // For example 'select fact.id, dim2.id ...'
-              // Right child is the column name, left child.ident is table name
-              ASTNode tabident = HQLParser.findNodeByPath(current, 
TOK_TABLE_OR_COL, Identifier);
-              ASTNode colIdent = (ASTNode) current.getChild(1);
-              String column = colIdent.getText().toLowerCase();
-
-              if (pickedExpressions.containsKey(column)) {
-                PickedExpression expr = getPickedExpression(column, 
tabident.getText().toLowerCase());
-                if (expr != null) {
-                  node.setChild(i, replaceAlias(expr.getRewrittenAST(), 
cubeql));
-                }
+      HQLParser.bft(node, visited -> {
+        ASTNode node1 = visited.getNode();
+        int childcount = node1.getChildCount();
+        for (int i = 0; i < childcount; i++) {
+          ASTNode current = (ASTNode) node1.getChild(i);
+          if (current.getToken().getType() == DOT) {
+            // This is for the case where column name is prefixed by table name
+            // or table alias
+            // For example 'select fact.id, dim2.id ...'
+            // Right child is the column name, left child.ident is table name
+            ASTNode tabident = HQLParser.findNodeByPath(current, 
TOK_TABLE_OR_COL, Identifier);
+            ASTNode colIdent = (ASTNode) current.getChild(1);
+            String column = colIdent.getText().toLowerCase();
+
+            if (pickedExpressions.containsKey(column)) {
+              assert tabident != null;
+              PickedExpression expr = getPickedExpression(column, 
tabident.getText().toLowerCase());
+              if (expr != null) {
+                node1.setChild(i, replaceAlias(expr.getRewrittenAST(), 
cubeql));
               }
             }
           }
@@ -502,12 +480,8 @@ class ExpressionResolver implements ContextRewriter {
               log.debug("{} is not directly evaluable in {}", ec, cTable);
               if (ec.evaluableExpressions.get(cTable) != null && 
!ec.evaluableExpressions.get(cTable).isEmpty()) {
                 // pick first evaluable expression
-                Set<PickedExpression> peSet = 
pickedExpressions.get(ecEntry.getKey());
-                if (peSet == null) {
-                  peSet = new HashSet<PickedExpression>();
-                  pickedExpressions.put(ecEntry.getKey(), peSet);
-                }
-                peSet.add(new PickedExpression(ec.srcAlias, 
ec.evaluableExpressions.get(cTable).iterator().next()));
+                pickedExpressions.computeIfAbsent(ecEntry.getKey(), k -> new 
HashSet<>())
+                  .add(new PickedExpression(ec.srcAlias, 
ec.evaluableExpressions.get(cTable).iterator().next()));
               }
             }
           }
@@ -616,7 +590,7 @@ class ExpressionResolver implements ContextRewriter {
       for (Map.Entry<String, Set<String>> entry : 
cubeql.getTblAliasToColumns().entrySet()) {
         String alias = entry.getKey();
         // skip default alias
-        if (alias == CubeQueryContext.DEFAULT_TABLE) {
+        if (Objects.equals(alias, CubeQueryContext.DEFAULT_TABLE)) {
           continue;
         }
         AbstractCubeTable tbl = cubeql.getCubeTableForAlias(alias);
@@ -646,41 +620,39 @@ class ExpressionResolver implements ContextRewriter {
       // prune invalid expressions
       cubeql.getExprCtx().pruneExpressions();
       // prune candidate facts without any valid expressions
-      if (cubeql.getCube() != null && !cubeql.getCandidateFacts().isEmpty()) {
+      if (cubeql.getCube() != null && !cubeql.getCandidates().isEmpty()) {
         for (Map.Entry<String, Set<ExpressionContext>> ecEntry : 
exprCtx.allExprsQueried.entrySet()) {
           String expr = ecEntry.getKey();
           Set<ExpressionContext> ecSet = ecEntry.getValue();
           for (ExpressionContext ec : ecSet) {
             if (ec.getSrcTable().getName().equals(cubeql.getCube().getName())) 
{
               if (cubeql.getQueriedExprsWithMeasures().contains(expr)) {
-                for (Iterator<Set<CandidateFact>> sItr = 
cubeql.getCandidateFactSets().iterator(); sItr.hasNext();) {
-                  Set<CandidateFact> factSet = sItr.next();
-                  boolean evaluableInSet = false;
-                  for (CandidateFact cfact : factSet) {
-                    if (ec.isEvaluable(cfact)) {
-                      evaluableInSet = true;
-                    }
-                  }
-                  if (!evaluableInSet) {
-                    log.info("Not considering fact table set:{} as {} is not 
evaluable", factSet, ec.exprCol.getName());
+                for (Iterator<Candidate> sItr = 
cubeql.getCandidates().iterator(); sItr.hasNext();) {
+                  Candidate cand = sItr.next();
+                  if (!cand.isExpressionEvaluable(ec)) {
+                    log.info("Not considering Candidate :{} as {} is not 
evaluable", cand, ec.exprCol.getName());
                     sItr.remove();
+                    cubeql.addCandidatePruningMsg(cand,
+                        
CandidateTablePruneCause.expressionNotEvaluable(ec.exprCol.getName()));
                   }
                 }
               } else {
-                for (Iterator<CandidateFact> i = 
cubeql.getCandidateFacts().iterator(); i.hasNext();) {
-                  CandidateFact cfact = i.next();
-                  if (!ec.isEvaluable(cfact)) {
-                    log.info("Not considering fact table:{} as {} is not 
evaluable", cfact, ec.exprCol.getName());
-                    cubeql.addFactPruningMsgs(cfact.fact,
-                      
CandidateTablePruneCause.expressionNotEvaluable(ec.exprCol.getName()));
-                    i.remove();
+                // prune dimension only expressions
+                Set<StorageCandidate> storageCandidates = 
CandidateUtil.getStorageCandidates(cubeql.getCandidates());
+                for (StorageCandidate sc : storageCandidates) {
+                  if (!sc.isExpressionEvaluable(ec)) {
+                    Collection<Candidate> prunedCandidates =
+                        CandidateUtil.filterCandidates(cubeql.getCandidates(), 
sc);
+                    log.info("Not considering candidate(s) :{} as expr :{} in 
storage :{} is not evaluable",
+                        prunedCandidates, ec.exprCol.getName(), sc);
+                    cubeql.addStoragePruningMsg(sc,
+                        
CandidateTablePruneCause.expressionNotEvaluable(ec.exprCol.getName()));
                   }
                 }
               }
             }
           }
         }
-        
cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCode.EXPRESSION_NOT_EVALUABLE);
       }
       // prune candidate dims without any valid expressions
       if (cubeql.getDimensions() != null && !cubeql.getDimensions().isEmpty()) 
{
@@ -707,24 +679,21 @@ class ExpressionResolver implements ContextRewriter {
   }
 
   private static ASTNode replaceAlias(final ASTNode expr, final 
CubeQueryContext cubeql) throws LensException {
-    ASTNode finalAST = MetastoreUtil.copyAST(expr);
-    HQLParser.bft(finalAST, new ASTNodeVisitor() {
-      @Override
-      public void visit(TreeNode visited) {
-        ASTNode node = visited.getNode();
-        ASTNode parent = null;
-        if (visited.getParent() != null) {
-          parent = visited.getParent().getNode();
-        }
-
-        if (node.getToken().getType() == TOK_TABLE_OR_COL && (parent != null 
&& parent.getToken().getType() == DOT)) {
-          ASTNode current = (ASTNode) node.getChild(0);
-          if (current.getToken().getType() == Identifier) {
-            String tableName = current.getToken().getText().toLowerCase();
-            String alias = cubeql.getAliasForTableName(tableName);
-            if (!alias.equalsIgnoreCase(tableName)) {
-              node.setChild(0, new ASTNode(new 
CommonToken(HiveParser.Identifier, alias)));
-            }
+    final ASTNode finalAST = MetastoreUtil.copyAST(expr);
+    HQLParser.bft(finalAST, visited -> {
+      ASTNode node = visited.getNode();
+      ASTNode parent = null;
+      if (visited.getParent() != null) {
+        parent = visited.getParent().getNode();
+      }
+
+      if (node.getToken().getType() == TOK_TABLE_OR_COL && (parent != null && 
parent.getToken().getType() == DOT)) {
+        ASTNode current = (ASTNode) node.getChild(0);
+        if (current.getToken().getType() == Identifier) {
+          String tableName = current.getToken().getText().toLowerCase();
+          String alias = cubeql.getAliasForTableName(tableName);
+          if (!alias.equalsIgnoreCase(tableName)) {
+            node.setChild(0, new ASTNode(new 
CommonToken(HiveParser.Identifier, alias)));
           }
         }
       }
@@ -738,33 +707,30 @@ class ExpressionResolver implements ContextRewriter {
       return;
     }
     // Traverse the tree and resolve expression columns
-    HQLParser.bft(expr, new ASTNodeVisitor() {
-      @Override
-      public void visit(TreeNode visited) throws LensException {
-        ASTNode node = visited.getNode();
-        int childcount = node.getChildCount();
-        for (int i = 0; i < childcount; i++) {
-          ASTNode current = (ASTNode) node.getChild(i);
-          if (current.getToken().getType() == TOK_TABLE_OR_COL && (node != 
null && node.getToken().getType() != DOT)) {
-            // Take child ident.totext
-            ASTNode ident = (ASTNode) current.getChild(0);
-            String column = ident.getText().toLowerCase();
-            if (toReplace.equals(column)) {
-              node.setChild(i, MetastoreUtil.copyAST(columnAST));
-            }
-          } else if (current.getToken().getType() == DOT) {
-            // This is for the case where column name is prefixed by table name
-            // or table alias
-            // For example 'select fact.id, dim2.id ...'
-            // Right child is the column name, left child.ident is table name
-            ASTNode tabident = HQLParser.findNodeByPath(current, 
TOK_TABLE_OR_COL, Identifier);
-            ASTNode colIdent = (ASTNode) current.getChild(1);
-
-            String column = colIdent.getText().toLowerCase();
-
-            if (toReplace.equals(column)) {
-              node.setChild(i, MetastoreUtil.copyAST(columnAST));
-            }
+    HQLParser.bft(expr, visited -> {
+      ASTNode node = visited.getNode();
+      int childcount = node.getChildCount();
+      for (int i = 0; i < childcount; i++) {
+        ASTNode current = (ASTNode) node.getChild(i);
+        if (current.getToken().getType() == TOK_TABLE_OR_COL && 
node.getToken().getType() != DOT) {
+          // Take child ident.totext
+          ASTNode ident = (ASTNode) current.getChild(0);
+          String column = ident.getText().toLowerCase();
+          if (toReplace.equals(column)) {
+            node.setChild(i, MetastoreUtil.copyAST(columnAST));
+          }
+        } else if (current.getToken().getType() == DOT) {
+          // This is for the case where column name is prefixed by table name
+          // or table alias
+          // For example 'select fact.id, dim2.id ...'
+          // Right child is the column name, left child.ident is table name
+          ASTNode tabident = HQLParser.findNodeByPath(current, 
TOK_TABLE_OR_COL, Identifier);
+          ASTNode colIdent = (ASTNode) current.getChild(1);
+
+          String column = colIdent.getText().toLowerCase();
+
+          if (toReplace.equals(column)) {
+            node.setChild(i, MetastoreUtil.copyAST(columnAST));
           }
         }
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/ae83caae/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
index 48af0c9..94f9c7d 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/FieldValidator.java
@@ -104,7 +104,6 @@ public class FieldValidator implements ContextRewriter {
           conflictingFields.addAll(queriedMsrs);
           throw new FieldsCannotBeQueriedTogetherException(new 
ConflictingFields(conflictingFields));
         } else {
-
           conflictingFields.addAll(queriedMsrs);
           throw new FieldsCannotBeQueriedTogetherException(new 
ConflictingFields(conflictingFields));
         }

http://git-wip-us.apache.org/repos/asf/lens/blob/ae83caae/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
index 052b87a..1b30c0b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *   http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
@@ -18,10 +18,11 @@
  */
 package org.apache.lens.cube.parse;
 
+import static 
org.apache.lens.cube.parse.ColumnResolver.addColumnsForSelectExpr;
+
 import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 
-import java.util.ArrayList;
-import java.util.List;
+import java.util.*;
 
 import org.apache.lens.cube.metadata.AbstractBaseTable;
 import org.apache.lens.server.api.error.LensException;
@@ -42,6 +43,7 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 class GroupbyResolver implements ContextRewriter {
 
+  private static final String SELECT_ALIAS_PREFIX = "select_expr";
   private final boolean selectPromotionEnabled;
   private final boolean groupbyPromotionEnabled;
 
@@ -53,8 +55,8 @@ class GroupbyResolver implements ContextRewriter {
         CubeQueryConfUtil.DEFAULT_ENABLE_GROUP_BY_TO_SELECT);
   }
 
-  private void promoteSelect(CubeQueryContext cubeql, 
List<SelectPhraseContext> selectExprs,
-    List<String> groupByExprs) throws LensException {
+  private void promoteSelect(CubeQueryContext cubeql, 
List<SelectPhraseContext> selectExprs, List<String> groupByExprs)
+    throws LensException {
     if (!selectPromotionEnabled) {
       return;
     }
@@ -79,7 +81,7 @@ class GroupbyResolver implements ContextRewriter {
                 groupbyAST.addChild(exprAST);
               } else {
                 // no group by ast exist, create one
-                ASTNode newAST = new ASTNode(new CommonToken(TOK_GROUPBY));
+                ASTNode newAST = new ASTNode(new CommonToken(TOK_GROUPBY, 
"TOK_GROUPBY"));
                 newAST.addChild(exprAST);
                 cubeql.setGroupByAST(newAST);
               }
@@ -97,7 +99,6 @@ class GroupbyResolver implements ContextRewriter {
     return node != null && node.getToken() != null && !hasTableOrColumn(node);
   }
 
-
   /*
    * Check if table or column used in node
    */
@@ -115,8 +116,7 @@ class GroupbyResolver implements ContextRewriter {
     return false;
   }
 
-  private void promoteGroupby(CubeQueryContext cubeql, 
List<SelectPhraseContext> selectExprs,
-                              List<String> groupByExprs)
+  private void promoteGroupby(CubeQueryContext cubeql, 
List<SelectPhraseContext> selectExprs, List<String> groupByExprs)
     throws LensException {
     if (!groupbyPromotionEnabled) {
       return;
@@ -131,12 +131,44 @@ class GroupbyResolver implements ContextRewriter {
     for (String expr : groupByExprs) {
       if (!contains(selectExprs, expr)) {
         ASTNode exprAST = HQLParser.parseExpr(expr, cubeql.getConf());
-        addChildAtIndex(index, cubeql.getSelectAST(), exprAST);
+        ASTNode parent = new ASTNode(new CommonToken(HiveParser.TOK_SELEXPR, 
"TOK_SELEXPR"));
+        parent.addChild(exprAST);
+        exprAST.setParent(parent);
+        addChildAtIndex(index, cubeql.getSelectAST(), parent);
+        updateSelectPhrase(cubeql, index, parent);
         index++;
       }
     }
   }
 
+  private void updateSelectPhrase(CubeQueryContext cubeql, int index, ASTNode 
selectExpr) {
+    int exprInd = index;
+    ASTNode selectExprChild = (ASTNode) selectExpr.getChild(0);
+    Set<String> cols = new HashSet<>();
+    SelectPhraseContext sel = new SelectPhraseContext(selectExpr);
+    addColumnsForSelectExpr(sel, selectExpr, cubeql.getSelectAST(), cols);
+    String alias = selectExpr.getChildCount() > 1 ? 
selectExpr.getChild(1).getText() : null;
+    String selectAlias;
+    String selectFinalAlias = null;
+    if (alias != null) {
+      selectFinalAlias = alias;
+      selectAlias = SELECT_ALIAS_PREFIX + exprInd;
+    } else if (cols.size() == 1 && (selectExprChild.getToken().getType() == 
TOK_TABLE_OR_COL
+      || selectExprChild.getToken().getType() == DOT)) {
+      // select expression is same as the column
+      selectAlias = cols.iterator().next().toLowerCase();
+    } else {
+      selectAlias = SELECT_ALIAS_PREFIX + exprInd;
+      selectFinalAlias = HQLParser.getString(selectExprChild);
+    }
+    cubeql.addColumnsQueried(sel.getTblAliasToColumns());
+    sel.setSelectAlias(selectAlias);
+    sel.setFinalAlias(!StringUtils.isBlank(selectFinalAlias) ? "`" + 
selectFinalAlias + "`" : selectAlias);
+    sel.setActualAlias(alias != null ? alias.toLowerCase() : null);
+    cubeql.getSelectPhrases().add(exprInd, sel);
+    //cubeql.addSelectPhrase(sel);
+  }
+
   private void addChildAtIndex(int index, ASTNode parent, ASTNode child) {
     // add the last child
     int count = parent.getChildCount();
@@ -158,7 +190,7 @@ class GroupbyResolver implements ContextRewriter {
     List<SelectPhraseContext> selectExprs = 
getSelectNonAggregateNonMeasureExpressions(cubeql);
     List<String> groupByExprs = new ArrayList<>();
     if (cubeql.getGroupByString() != null) {
-      String[] gby = getGroupbyExpressions(cubeql.getGroupByAST()).toArray(new 
String[]{});
+      String[] gby = getGroupbyExpressions(cubeql.getGroupByAST()).toArray(new 
String[] {});
       for (String g : gby) {
         groupByExprs.add(g.trim());
       }
@@ -228,7 +260,7 @@ class GroupbyResolver implements ContextRewriter {
       // by the time Groupby resolver is looking for aggregate, all columns 
should be aliased with correct
       // alias name.
       if (cubeql.getCubeTableForAlias(alias) instanceof AbstractBaseTable) {
-        if 
(((AbstractBaseTable)cubeql.getCubeTableForAlias(alias)).getExpressionByName(colname)
 != null) {
+        if (((AbstractBaseTable) 
cubeql.getCubeTableForAlias(alias)).getExpressionByName(colname) != null) {
           return cubeql.getExprCtx().getExpressionContext(colname, 
alias).hasAggregates();
         }
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/ae83caae/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
new file mode 100644
index 0000000..52085ea
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
@@ -0,0 +1,142 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import java.util.*;
+
+import org.apache.lens.cube.metadata.FactPartition;
+import org.apache.lens.cube.metadata.TimeRange;
+import org.apache.lens.server.api.error.LensException;
+
+/**
+ * Represents a join of two candidates
+ */
+public class JoinCandidate implements Candidate {
+
+  /**
+   * Child candidates that will participate in the join
+   */
+  private Candidate childCandidate1;
+  private Candidate childCandidate2;
+  private String toStr;
+  private QueryAST queryAST;
+  private CubeQueryContext cubeql;
+
+  public JoinCandidate(Candidate childCandidate1, Candidate childCandidate2, 
CubeQueryContext cubeql) {
+    this.childCandidate1 = childCandidate1;
+    this.childCandidate2 = childCandidate2;
+    this.cubeql = cubeql;
+  }
+
+  @Override
+  public Collection<String> getColumns() {
+    Set<String> columns = new HashSet<>();
+    columns.addAll(childCandidate1.getColumns());
+    columns.addAll(childCandidate2.getColumns());
+    return columns;
+  }
+
+  @Override
+  public Date getStartTime() {
+    return childCandidate1.getStartTime().after(childCandidate2.getStartTime())
+        ? childCandidate1.getStartTime() : childCandidate2.getStartTime();
+  }
+
+  @Override
+  public Date getEndTime() {
+    return childCandidate1.getEndTime().before(childCandidate2.getEndTime())
+        ? childCandidate1.getEndTime() : childCandidate2.getEndTime();
+  }
+
+  @Override
+  public double getCost() {
+    return childCandidate1.getCost() + childCandidate2.getCost();
+  }
+
+  @Override
+  public boolean contains(Candidate candidate) {
+    if (this.equals(candidate)) {
+      return true;
+    } else {
+      return childCandidate1.contains(candidate) || 
childCandidate2.contains(candidate);
+    }
+  }
+
+  @Override
+  public Collection<Candidate> getChildren() {
+    ArrayList<Candidate> joinCandidates = new ArrayList<>();
+    joinCandidates.add(childCandidate1);
+    joinCandidates.add(childCandidate2);
+    return joinCandidates;
+  }
+
+  /**
+   * @param timeRange
+   * @return
+   */
+  @Override
+  public boolean evaluateCompleteness(TimeRange timeRange, TimeRange 
parentTimeRange, boolean failOnPartialData)
+    throws LensException {
+    return this.childCandidate1.evaluateCompleteness(timeRange, 
parentTimeRange, failOnPartialData)
+        && this.childCandidate2.evaluateCompleteness(timeRange, 
parentTimeRange, failOnPartialData);
+  }
+
+  /**
+   * @return all the partitions from the children
+   */
+  @Override
+  public Set<FactPartition> getParticipatingPartitions() {
+    Set<FactPartition> factPartitionsSet = new HashSet<>();
+    factPartitionsSet.addAll(childCandidate1.getParticipatingPartitions());
+    factPartitionsSet.addAll(childCandidate2.getParticipatingPartitions());
+    return factPartitionsSet;
+  }
+
+  @Override
+  public boolean isExpressionEvaluable(ExpressionResolver.ExpressionContext 
expr) {
+    return childCandidate1.isExpressionEvaluable(expr) || 
childCandidate2.isExpressionEvaluable(expr);
+  }
+
+  @Override
+  public Set<Integer> getAnswerableMeasurePhraseIndices() {
+    Set<Integer> mesureIndices = new HashSet<>();
+    for (Candidate cand : getChildren()) {
+      mesureIndices.addAll(cand.getAnswerableMeasurePhraseIndices());
+    }
+    return mesureIndices;
+  }
+
+  @Override
+  public boolean isTimeRangeCoverable(TimeRange timeRange) throws 
LensException {
+    return this.childCandidate1.isTimeRangeCoverable(timeRange)
+      && this.childCandidate2.isTimeRangeCoverable(timeRange);
+  }
+
+  @Override
+  public String toString() {
+    if (this.toStr == null) {
+      this.toStr = getToString();
+    }
+    return this.toStr;
+  }
+
+  private String getToString() {
+    return "JOIN[" + childCandidate1.toString() + ", " + 
childCandidate2.toString() + "]";
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/ae83caae/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index fce1662..02e3dc7 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -42,7 +42,10 @@ import lombok.extern.slf4j.Slf4j;
 class JoinResolver implements ContextRewriter {
   private Map<AbstractCubeTable, JoinType> tableJoinTypeMap;
   private AbstractCubeTable target;
-  private HashMap<Dimension, List<JoinChain>> dimensionInJoinChain = new 
HashMap<Dimension, List<JoinChain>>();
+  /**
+   * Dimension as key and all the participating join chains for this dimension 
as value.
+   */
+  private HashMap<Dimension, List<JoinChain>> dimensionToJoinChainsMap = new 
HashMap<Dimension, List<JoinChain>>();
 
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws LensException {
@@ -91,10 +94,10 @@ class JoinResolver implements ContextRewriter {
       dims.add(chain.getDestTable());
       for (String dim : dims) {
         Dimension dimension = cubeql.getMetastoreClient().getDimension(dim);
-        if (dimensionInJoinChain.get(dimension) == null) {
-          dimensionInJoinChain.put(dimension, new ArrayList<JoinChain>());
+        if (dimensionToJoinChainsMap.get(dimension) == null) {
+          dimensionToJoinChainsMap.put(dimension, new ArrayList<JoinChain>());
         }
-        dimensionInJoinChain.get(dimension).add(chain);
+        dimensionToJoinChainsMap.get(dimension).add(chain);
       }
     }
   }
@@ -139,7 +142,7 @@ class JoinResolver implements ContextRewriter {
 
     Map<Aliased<Dimension>, List<JoinPath>> multipleJoinPaths = new 
LinkedHashMap<>();
 
-    // populate paths from joinchains
+    // Populate paths from join chains: for each destination dimension, collect all join paths leading to it.
     for (JoinChain chain : cubeql.getJoinchains().values()) {
       Dimension dimension = 
cubeql.getMetastoreClient().getDimension(chain.getDestTable());
       Aliased<Dimension> aliasedDimension = Aliased.create(dimension, 
chain.getName());
@@ -149,6 +152,7 @@ class JoinResolver implements ContextRewriter {
       multipleJoinPaths.get(aliasedDimension).addAll(
         chain.getRelationEdges(cubeql.getMetastoreClient()));
     }
+
     boolean flattenBridgeTables = 
cubeql.getConf().getBoolean(CubeQueryConfUtil.ENABLE_FLATTENING_FOR_BRIDGETABLES,
       CubeQueryConfUtil.DEFAULT_ENABLE_FLATTENING_FOR_BRIDGETABLES);
     String bridgeTableFieldAggr = 
cubeql.getConf().get(CubeQueryConfUtil.BRIDGE_TABLE_FIELD_AGGREGATOR,

http://git-wip-us.apache.org/repos/asf/lens/blob/ae83caae/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
index 0bc7f82..a9bd164 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *   http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
@@ -18,9 +18,11 @@
  */
 package org.apache.lens.cube.parse;
 
-import java.util.*;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
 
-import 
org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 import org.apache.lens.server.api.error.LensException;
 
 import lombok.extern.slf4j.Slf4j;
@@ -33,35 +35,36 @@ class LeastPartitionResolver implements ContextRewriter {
 
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws LensException {
-    if (cubeql.getCube() != null && !cubeql.getCandidateFactSets().isEmpty()) {
-      Map<Set<CandidateFact>, Integer> factPartCount = new 
HashMap<Set<CandidateFact>, Integer>();
+    if (cubeql.getCube() != null && !cubeql.getCandidates().isEmpty()) {
+      Map<Candidate, Integer> factPartCount = new HashMap<>();
 
       //The number of partitions being calculated is not the actual number of 
partitions,
       // they are number of time values now instead of partitions.
       // This seems fine, as the less number of time values actually represent 
the rollups on time. And with
       // MaxCoveringFactResolver facts with less partitions which are not 
covering the range would be removed.
-      for (Set<CandidateFact> facts : cubeql.getCandidateFactSets()) {
-        factPartCount.put(facts, getPartCount(facts));
+      for (Candidate candidate : cubeql.getCandidates()) {
+        factPartCount.put(candidate, getPartCount(candidate));
       }
 
       double minPartitions = Collections.min(factPartCount.values());
 
-      for (Iterator<Set<CandidateFact>> i = 
cubeql.getCandidateFactSets().iterator(); i.hasNext();) {
-        Set<CandidateFact> facts = i.next();
-        if (factPartCount.get(facts) > minPartitions) {
-          log.info("Not considering facts:{} from candidate fact tables as it 
requires more partitions to be"
-            + " queried:{} minimum:{}", facts, factPartCount.get(facts), 
minPartitions);
+      for (Iterator<Candidate> i = cubeql.getCandidates().iterator(); 
i.hasNext();) {
+        Candidate candidate = i.next();
+        if (factPartCount.get(candidate) > minPartitions) {
+          log.info("Not considering Candidate:{} as it requires more 
partitions to be" + " queried:{} minimum:{}",
+            candidate, factPartCount.get(candidate), minPartitions);
           i.remove();
+          cubeql.addCandidatePruningMsg(candidate,
+            new 
CandidateTablePruneCause(CandidateTablePruneCause.CandidateTablePruneCode.MORE_PARTITIONS));
         }
       }
-      
cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCode.MORE_PARTITIONS);
     }
   }
 
-  private int getPartCount(Set<CandidateFact> set) {
+  private int getPartCount(Candidate candidate) {
     int parts = 0;
-    for (CandidateFact f : set) {
-      parts += f.getNumQueriedParts();
+    for (StorageCandidate sc : CandidateUtil.getStorageCandidates(candidate)) {
+      parts += sc.getNumQueriedParts();
     }
     return parts;
   }

http://git-wip-us.apache.org/repos/asf/lens/blob/ae83caae/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java
index 4356401..dd25f3e 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/LightestFactResolver.java
@@ -34,32 +34,24 @@ public class LightestFactResolver implements 
ContextRewriter {
 
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws LensException {
-    if (cubeql.getCube() != null && !cubeql.getCandidateFactSets().isEmpty()) {
-      Map<Set<CandidateFact>, Double> factWeightMap = new 
HashMap<Set<CandidateFact>, Double>();
+    if (cubeql.getCube() != null && !cubeql.getCandidates().isEmpty()) {
+      Map<Candidate, Double> factWeightMap = new HashMap<Candidate, Double>();
 
-      for (Set<CandidateFact> facts : cubeql.getCandidateFactSets()) {
-        factWeightMap.put(facts, getWeight(facts));
+      for (Candidate cand : cubeql.getCandidates()) {
+        factWeightMap.put(cand, cand.getCost());
       }
 
       double minWeight = Collections.min(factWeightMap.values());
 
-      for (Iterator<Set<CandidateFact>> i = 
cubeql.getCandidateFactSets().iterator(); i.hasNext();) {
-        Set<CandidateFact> facts = i.next();
-        if (factWeightMap.get(facts) > minWeight) {
-          log.info("Not considering facts:{} from candidate fact tables as it 
has more fact weight:{} minimum:{}",
-            facts, factWeightMap.get(facts), minWeight);
+      for (Iterator<Candidate> i = cubeql.getCandidates().iterator(); 
i.hasNext();) {
+        Candidate cand = i.next();
+        if (factWeightMap.get(cand) > minWeight) {
+          log.info("Not considering candidate:{} from final candidates as it 
has more fact weight:{} minimum:{}",
+            cand, factWeightMap.get(cand), minWeight);
+          cubeql.addCandidatePruningMsg(cand, new 
CandidateTablePruneCause(CandidateTablePruneCode.MORE_WEIGHT));
           i.remove();
         }
       }
-      
cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCode.MORE_WEIGHT);
     }
   }
-
-  private Double getWeight(Set<CandidateFact> set) {
-    Double weight = 0.0;
-    for (CandidateFact f : set) {
-      weight += f.fact.weight();
-    }
-    return weight;
-  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/ae83caae/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
index 2822857..34180d1 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/MaxCoveringFactResolver.java
@@ -7,7 +7,7 @@
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
- *   http://www.apache.org/licenses/LICENSE-2.0
+ * http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing,
  * software distributed under the License is distributed on an
@@ -21,7 +21,6 @@ package org.apache.lens.cube.parse;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.Map;
-import java.util.Set;
 
 import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.cube.metadata.UpdatePeriod;
@@ -31,11 +30,10 @@ import org.apache.lens.server.api.error.LensException;
 import org.apache.hadoop.conf.Configuration;
 
 import com.google.common.collect.Maps;
-
 import lombok.extern.slf4j.Slf4j;
 
 /**
- * Prune candidate fact sets so that the facts except the ones that are 
covering maximum of range are pruned
+ * Prunes all candidates except the ones that cover the maximum of the queried time range
  */
 @Slf4j
 class MaxCoveringFactResolver implements ContextRewriter {
@@ -53,7 +51,7 @@ class MaxCoveringFactResolver implements ContextRewriter {
       // redundant computation.
       return;
     }
-    if (cubeql.getCube() == null || cubeql.getCandidateFactSets().size() <= 1) 
{
+    if (cubeql.getCube() == null || cubeql.getCandidates().size() <= 1) {
       // nothing to prune.
       return;
     }
@@ -66,13 +64,13 @@ class MaxCoveringFactResolver implements ContextRewriter {
   private void resolveByTimeCovered(CubeQueryContext cubeql) {
     // For each part column, which candidate fact sets are covering how much 
amount.
     // Later, we'll maximize coverage for each queried part column.
-    Map<String, Map<Set<CandidateFact>, Long>> partCountsPerPartCol = 
Maps.newHashMap();
-    for (Set<CandidateFact> facts : cubeql.getCandidateFactSets()) {
-      for (Map.Entry<String, Long> entry : 
getTimeCoveredForEachPartCol(facts).entrySet()) {
+    Map<String, Map<Candidate, Long>> partCountsPerPartCol = Maps.newHashMap();
+    for (Candidate cand : cubeql.getCandidates()) {
+      for (Map.Entry<String, Long> entry : 
getTimeCoveredForEachPartCol(cand).entrySet()) {
         if (!partCountsPerPartCol.containsKey(entry.getKey())) {
-          partCountsPerPartCol.put(entry.getKey(), Maps.<Set<CandidateFact>, 
Long>newHashMap());
+          partCountsPerPartCol.put(entry.getKey(), Maps.<Candidate, 
Long>newHashMap());
         }
-        partCountsPerPartCol.get(entry.getKey()).put(facts, entry.getValue());
+        partCountsPerPartCol.get(entry.getKey()).put(cand, entry.getValue());
       }
     }
     // for each queried partition, prune fact sets that are covering less 
range than max
@@ -80,29 +78,30 @@ class MaxCoveringFactResolver implements ContextRewriter {
       if (partCountsPerPartCol.get(partColQueried) != null) {
         long maxTimeCovered = 
Collections.max(partCountsPerPartCol.get(partColQueried).values());
         TimeCovered timeCovered = new TimeCovered(maxTimeCovered);
-        Iterator<Set<CandidateFact>> iter = 
cubeql.getCandidateFactSets().iterator();
+        Iterator<Candidate> iter = cubeql.getCandidates().iterator();
         while (iter.hasNext()) {
-          Set<CandidateFact> facts = iter.next();
-          Long timeCoveredLong = 
partCountsPerPartCol.get(partColQueried).get(facts);
+          Candidate candidate = iter.next();
+          Long timeCoveredLong = 
partCountsPerPartCol.get(partColQueried).get(candidate);
           if (timeCoveredLong == null) {
             timeCoveredLong = 0L;
           }
           if (timeCoveredLong < maxTimeCovered) {
-            log.info("Not considering facts:{} from candidate fact tables as 
it covers less time than the max"
-                    + " for partition column: {} which is: {}", facts, 
partColQueried, timeCovered);
+            log.info("Not considering Candidate:{} from Candidate set as it 
covers less time than the max"
+              + " for partition column: {} which is: {}", candidate, 
partColQueried, timeCovered);
             iter.remove();
+            cubeql.addCandidatePruningMsg(candidate,
+              new 
CandidateTablePruneCause(CandidateTablePruneCause.CandidateTablePruneCode.LESS_DATA));
           }
         }
       }
     }
-    
cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.lessData(null));
   }
 
   private void resolveByDataCompleteness(CubeQueryContext cubeql) {
     // From the list of  candidate fact sets, we calculate the 
maxDataCompletenessFactor.
     float maxDataCompletenessFactor = 0f;
-    for (Set<CandidateFact> facts : cubeql.getCandidateFactSets()) {
-      float dataCompletenessFactor = computeDataCompletenessFactor(facts);
+    for (Candidate cand : cubeql.getCandidates()) {
+      float dataCompletenessFactor = computeDataCompletenessFactor(cand);
       if (dataCompletenessFactor > maxDataCompletenessFactor) {
         maxDataCompletenessFactor = dataCompletenessFactor;
       }
@@ -114,25 +113,26 @@ class MaxCoveringFactResolver implements ContextRewriter {
     }
 
     // We prune those candidate fact set, whose dataCompletenessFactor is less 
than maxDataCompletenessFactor
-    Iterator<Set<CandidateFact>> iter = 
cubeql.getCandidateFactSets().iterator();
+    Iterator<Candidate> iter = cubeql.getCandidates().iterator();
     while (iter.hasNext()) {
-      Set<CandidateFact> facts = iter.next();
-      float dataCompletenessFactor = computeDataCompletenessFactor(facts);
+      Candidate cand = iter.next();
+      float dataCompletenessFactor = computeDataCompletenessFactor(cand);
       if (dataCompletenessFactor < maxDataCompletenessFactor) {
-        log.info("Not considering facts:{} from candidate fact tables as the 
dataCompletenessFactor for this:{} is "
-                + "less than the max:{}", facts, dataCompletenessFactor, 
maxDataCompletenessFactor);
+        log.info("Not considering Candidate :{} from the list as the 
dataCompletenessFactor for this:{} is "
+          + "less than the max:{}", cand, dataCompletenessFactor, 
maxDataCompletenessFactor);
         iter.remove();
+        cubeql.addCandidatePruningMsg(cand,
+          new 
CandidateTablePruneCause(CandidateTablePruneCause.CandidateTablePruneCode.INCOMPLETE_PARTITION));
       }
     }
-    
cubeql.pruneCandidateFactWithCandidateSet(CandidateTablePruneCause.incompletePartitions(null));
   }
 
-  private float computeDataCompletenessFactor(Set<CandidateFact> facts) {
+  private float computeDataCompletenessFactor(Candidate cand) {
     float completenessFactor = 0f;
     int numPartition = 0;
-    for (CandidateFact fact : facts) {
-      if (fact.getDataCompletenessMap() != null) {
-        Map<String, Map<String, Float>> completenessMap = 
fact.getDataCompletenessMap();
+    for (StorageCandidate sc : CandidateUtil.getStorageCandidates(cand)) {
+      if (sc.getDataCompletenessMap() != null) {
+        Map<String, Map<String, Float>> completenessMap = 
sc.getDataCompletenessMap();
         for (Map<String, Float> partitionCompleteness : 
completenessMap.values()) {
           for (Float value : partitionCompleteness.values()) {
             numPartition++;
@@ -141,33 +141,30 @@ class MaxCoveringFactResolver implements ContextRewriter {
         }
       }
     }
-    return numPartition == 0 ? completenessFactor : 
completenessFactor/numPartition;
+    return numPartition == 0 ? completenessFactor : completenessFactor / 
numPartition;
   }
 
   /**
    * Returns time covered by fact set for each part column.
-   * @param facts
+   *
+   * @param cand
    * @return
    */
-  private Map<String, Long> getTimeCoveredForEachPartCol(Set<CandidateFact> 
facts) {
+  private Map<String, Long> getTimeCoveredForEachPartCol(Candidate cand) {
     Map<String, Long> ret = Maps.newHashMap();
     UpdatePeriod smallest = UpdatePeriod.values()[UpdatePeriod.values().length 
- 1];
-    for (CandidateFact fact : facts) {
-      for (FactPartition part : fact.getPartsQueried()) {
-        if (part.getPeriod().compareTo(smallest) < 0) {
-          smallest = part.getPeriod();
-        }
+    for (FactPartition part : cand.getParticipatingPartitions()) {
+      if (part.getPeriod().compareTo(smallest) < 0) {
+        smallest = part.getPeriod();
       }
     }
     PartitionRangesForPartitionColumns partitionRangesForPartitionColumns = 
new PartitionRangesForPartitionColumns();
-    for (CandidateFact fact : facts) {
-      for (FactPartition part : fact.getPartsQueried()) {
-        if (part.isFound()) {
-          try {
-            partitionRangesForPartitionColumns.add(part);
-          } catch (LensException e) {
-            log.error("invalid partition: ", e);
-          }
+    for (FactPartition part : cand.getParticipatingPartitions()) {
+      if (part.isFound()) {
+        try {
+          partitionRangesForPartitionColumns.add(part);
+        } catch (LensException e) {
+          log.error("invalid partition: ", e);
         }
       }
     }
@@ -196,17 +193,8 @@ class MaxCoveringFactResolver implements ContextRewriter {
     }
 
     public String toString() {
-      return new StringBuilder()
-        .append(days)
-        .append(" days, ")
-        .append(hours)
-        .append(" hours, ")
-        .append(minutes)
-        .append(" minutes, ")
-        .append(seconds)
-        .append(" seconds, ")
-        .append(milliseconds)
-        .append(" milliseconds.").toString();
+      return String.valueOf(days) + " days, " + hours + " hours, " + minutes
+        + " minutes, " + seconds + " seconds, " + milliseconds + " 
milliseconds.";
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/ae83caae/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
deleted file mode 100644
index 979c24b..0000000
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiFactHQLContext.java
+++ /dev/null
@@ -1,238 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import static org.apache.lens.cube.parse.HQLParser.*;
-
-import java.util.*;
-
-import org.apache.lens.cube.error.LensCubeErrorCode;
-import org.apache.lens.cube.metadata.Dimension;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-
-import org.antlr.runtime.CommonToken;
-
-import com.google.common.collect.Lists;
-import lombok.extern.slf4j.Slf4j;
-
-/**
- * Writes a join query with all the facts involved, with where, groupby and 
having expressions pushed down to the fact
- * queries.
- */
-@Slf4j
-class MultiFactHQLContext extends SimpleHQLContext {
-
-  private Set<CandidateFact> facts;
-  private CubeQueryContext query;
-  private Map<CandidateFact, SimpleHQLContext> factHQLContextMap = new 
HashMap<>();
-
-  MultiFactHQLContext(Set<CandidateFact> facts, Map<Dimension, CandidateDim> 
dimsToQuery,
-    Map<CandidateFact, Set<Dimension>> factDimMap, CubeQueryContext query) 
throws LensException {
-    super();
-    this.query = query;
-    this.facts = facts;
-    for (CandidateFact fact : facts) {
-      if (fact.getStorageTables().size() > 1) {
-        factHQLContextMap.put(fact, new SingleFactMultiStorageHQLContext(fact, 
dimsToQuery, query, fact));
-      } else {
-        factHQLContextMap.put(fact,
-          new SingleFactSingleStorageHQLContext(fact, dimsToQuery, 
factDimMap.get(fact), query,
-            DefaultQueryAST.fromCandidateFact(fact, 
fact.getStorageTables().iterator().next(), fact)));
-      }
-    }
-  }
-
-  protected void setMissingExpressions() throws LensException {
-    setSelect(getSelectString());
-    setFrom(getFromString());
-    setWhere(getWhereString());
-    setGroupby(getGroupbyString());
-    setHaving(getHavingString());
-    setOrderby(getOrderbyString());
-  }
-
-  private String getOrderbyString() {
-    return query.getOrderByString();
-  }
-
-  private String getHavingString() {
-    return null;
-  }
-
-  private String getGroupbyString() {
-    return null;
-  }
-
-  private String getWhereString() {
-    return query.getWhereString();
-  }
-
-  public String toHQL() throws LensException {
-    return query.getInsertClause() + super.toHQL();
-  }
-
-  private String getSelectString() throws LensException {
-    Map<Integer, List<Integer>> selectToFactIndex = new 
HashMap<>(query.getSelectAST().getChildCount());
-    int fi = 1;
-    for (CandidateFact fact : facts) {
-      for (int ind : fact.getSelectIndices()) {
-        if (!selectToFactIndex.containsKey(ind)) {
-          selectToFactIndex.put(ind, Lists.<Integer>newArrayList());
-        }
-        selectToFactIndex.get(ind).add(fi);
-      }
-      fi++;
-    }
-    StringBuilder select = new StringBuilder();
-    for (int i = 0; i < query.getSelectAST().getChildCount(); i++) {
-      if (selectToFactIndex.get(i) == null) {
-        throw new 
LensException(LensCubeErrorCode.EXPRESSION_NOT_IN_ANY_FACT.getLensErrorInfo(),
-          HQLParser.getString((ASTNode) query.getSelectAST().getChild(i)));
-      }
-      if (selectToFactIndex.get(i).size() == 1) {
-        select.append("mq").append(selectToFactIndex.get(i).get(0)).append(".")
-          .append(query.getSelectPhrases().get(i).getSelectAlias()).append(" 
");
-      } else {
-        select.append("COALESCE(");
-        String sep = "";
-        for (Integer factIndex : selectToFactIndex.get(i)) {
-          select.append(sep).append("mq").append(factIndex).append(".").append(
-            query.getSelectPhrases().get(i).getSelectAlias());
-          sep = ", ";
-        }
-        select.append(") ");
-      }
-      select.append(query.getSelectPhrases().get(i).getFinalAlias());
-      if (i != query.getSelectAST().getChildCount() - 1) {
-        select.append(", ");
-      }
-    }
-    return select.toString();
-  }
-
-  private String getMultiFactJoinCondition(int i, String dim) {
-    StringBuilder joinCondition = new StringBuilder();
-    if (i <= 1) {
-      return "".toString();
-    } else {
-      joinCondition.append("mq").append(i - 
2).append(".").append(dim).append(" <=> ").
-          append("mq").append(i - 1).append(".").append(dim);
-    }
-    return joinCondition.toString();
-  }
-
-  private String getFromString() throws LensException {
-    StringBuilder fromBuilder = new StringBuilder();
-    int aliasCount = 1;
-    String sep = "";
-    for (CandidateFact fact : facts) {
-      SimpleHQLContext facthql = factHQLContextMap.get(fact);
-      
fromBuilder.append(sep).append("(").append(facthql.toHQL()).append(")").append("
 mq").append(aliasCount++);
-      sep = " full outer join ";
-      if (!fact.getDimFieldIndices().isEmpty() && aliasCount > 2) {
-        fromBuilder.append(" on ");
-        Iterator<Integer> dimIter = fact.getDimFieldIndices().iterator();
-        while (dimIter.hasNext()) {
-          String dim = 
query.getSelectPhrases().get(dimIter.next()).getSelectAlias();
-          fromBuilder.append(getMultiFactJoinCondition(aliasCount, dim));
-          if (dimIter.hasNext()) {
-            fromBuilder.append(" AND ");
-          }
-        }
-      }
-    }
-    return fromBuilder.toString();
-  }
-
-
-  public static ASTNode convertHavingToWhere(ASTNode havingAST, 
CubeQueryContext context, Set<CandidateFact> cfacts,
-    AliasDecider aliasDecider) throws LensException {
-    if (havingAST == null) {
-      return null;
-    }
-    if (isAggregateAST(havingAST) || isTableColumnAST(havingAST) || 
isNonAggregateFunctionAST(havingAST)) {
-      // if already present in select, pick alias
-      String alias = null;
-      for (CandidateFact fact : cfacts) {
-        if (fact.isExpressionAnswerable(havingAST, context)) {
-          alias = fact.addAndGetAliasFromSelect(havingAST, aliasDecider);
-          return new ASTNode(new CommonToken(HiveParser.Identifier, alias));
-        }
-      }
-    }
-    if (havingAST.getChildren() != null) {
-      for (int i = 0; i < havingAST.getChildCount(); i++) {
-        ASTNode replaced = convertHavingToWhere((ASTNode) 
havingAST.getChild(i), context, cfacts, aliasDecider);
-        havingAST.setChild(i, replaced);
-      }
-    }
-    return havingAST;
-  }
-
-  public static ASTNode pushDownHaving(ASTNode ast, CubeQueryContext 
cubeQueryContext, Set<CandidateFact> cfacts)
-    throws LensException {
-    if (ast == null) {
-      return null;
-    }
-    if (ast.getType() == HiveParser.KW_AND || ast.getType() == 
HiveParser.TOK_HAVING) {
-      List<ASTNode> children = Lists.newArrayList();
-      for (Node child : ast.getChildren()) {
-        ASTNode newChild = pushDownHaving((ASTNode) child, cubeQueryContext, 
cfacts);
-        if (newChild != null) {
-          children.add(newChild);
-        }
-      }
-      if (children.size() == 0) {
-        return null;
-      } else if (children.size() == 1) {
-        return children.get(0);
-      } else {
-        ASTNode newASTNode = new ASTNode(ast.getToken());
-        for (ASTNode child : children) {
-          newASTNode.addChild(child);
-        }
-        return newASTNode;
-      }
-    }
-    if (isPrimitiveBooleanExpression(ast)) {
-      CandidateFact fact = pickFactToPushDown(ast, cubeQueryContext, cfacts);
-      if (fact == null) {
-        return ast;
-      }
-      fact.addToHaving(ast);
-      return null;
-    }
-    return ast;
-  }
-
-  private static CandidateFact pickFactToPushDown(ASTNode ast, 
CubeQueryContext cubeQueryContext, Set<CandidateFact>
-    cfacts) throws LensException {
-    for (CandidateFact fact : cfacts) {
-      if (fact.isExpressionAnswerable(ast, cubeQueryContext)) {
-        return fact;
-      }
-    }
-    return null;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/ae83caae/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
index 9b5a52f..0996db5 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
@@ -18,12 +18,17 @@
  */
 package org.apache.lens.cube.parse;
 
+import static java.util.stream.Collectors.toMap;
+
+import static com.google.common.collect.Sets.newHashSet;
+
 import java.util.ArrayList;
+import java.util.Comparator;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.lens.cube.metadata.AbstractCubeTable;
 import 
org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
 
 import org.apache.commons.lang.StringUtils;
@@ -36,7 +41,7 @@ import lombok.Data;
 import lombok.Getter;
 import lombok.NoArgsConstructor;
 
-public class PruneCauses<T extends AbstractCubeTable> extends HashMap<T, 
List<CandidateTablePruneCause>> {
+public class PruneCauses<T> extends HashMap<T, List<CandidateTablePruneCause>> 
{
   @Getter(lazy = true)
   private final HashMap<CandidateTablePruneCause, List<T>> reversed = 
reverse();
   @Getter(lazy = true)
@@ -48,10 +53,7 @@ public class PruneCauses<T extends AbstractCubeTable> 
extends HashMap<T, List<Ca
     HashMap<String, List<CandidateTablePruneCause>> detailedMessage = 
Maps.newHashMap();
     for (Map.Entry<CandidateTablePruneCause, List<T>> entry : 
getReversed().entrySet()) {
       String key = StringUtils.join(entry.getValue(), ",");
-      if (detailedMessage.get(key) == null) {
-        detailedMessage.put(key, new ArrayList<CandidateTablePruneCause>());
-      }
-      detailedMessage.get(key).add(entry.getKey());
+      detailedMessage.computeIfAbsent(key, k -> new 
ArrayList<>()).add(entry.getKey());
     }
     return detailedMessage;
   }
@@ -66,14 +68,11 @@ public class PruneCauses<T extends AbstractCubeTable> 
extends HashMap<T, List<Ca
     get(table).add(msg);
   }
 
-  public HashMap<CandidateTablePruneCause, List<T>> reverse() {
+  private HashMap<CandidateTablePruneCause, List<T>> reverse() {
     HashMap<CandidateTablePruneCause, List<T>> result = new 
HashMap<CandidateTablePruneCause, List<T>>();
     for (T key : keySet()) {
       for (CandidateTablePruneCause value : get(key)) {
-        if (result.get(value) == null) {
-          result.put(value, new ArrayList<T>());
-        }
-        result.get(value).add(key);
+        result.computeIfAbsent(value, k -> new ArrayList<>()).add(key);
       }
     }
     return result;
@@ -94,14 +93,10 @@ public class PruneCauses<T extends AbstractCubeTable> 
extends HashMap<T, List<Ca
   }
 
   public String getBriefCause() {
-    CandidateTablePruneCode maxCause = CandidateTablePruneCode.values()[0];
-    for (CandidateTablePruneCause cause : getReversed().keySet()) {
-      if (cause.getCause().compareTo(maxCause) > 0) {
-        maxCause = cause.getCause();
-      }
-    }
+    CandidateTablePruneCode maxCause = getReversed().keySet().stream()
+      
.map(CandidateTablePruneCause::getCause).max(Comparator.naturalOrder()).get();
     Map<CandidateTablePruneCause, String> maxCauseMap = Maps.newHashMap();
-    for (Map.Entry<CandidateTablePruneCause, List<T>> entry: 
getReversed().entrySet()) {
+    for (Map.Entry<CandidateTablePruneCause, List<T>> entry : 
getReversed().entrySet()) {
       if (entry.getKey().getCause().equals(maxCause)) {
         maxCauseMap.put(entry.getKey(), StringUtils.join(entry.getValue(), 
","));
       }
@@ -120,5 +115,11 @@ public class PruneCauses<T extends AbstractCubeTable> 
extends HashMap<T, List<Ca
   public static final class BriefAndDetailedError {
     private String brief;
     private HashMap<String, List<CandidateTablePruneCause>> details;
+
+    Map<HashSet<String>, List<CandidateTablePruneCause>> enhanced() {
+      return getDetails().entrySet().stream().collect(toMap(
+        o -> newHashSet(o.getKey().split(",")),
+        Map.Entry::getValue));
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/ae83caae/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
index 34a562d..310a655 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
@@ -36,6 +36,8 @@ import lombok.extern.slf4j.Slf4j;
 @EqualsAndHashCode(callSuper = true)
 @Slf4j
 class QueriedPhraseContext extends TracksQueriedColumns implements 
TrackQueriedCubeFields {
+  // position in org.apache.lens.cube.parse.CubeQueryContext.queriedPhrases
+  private int position;
   private final ASTNode exprAST;
   private Boolean aggregate;
   private String expr;
@@ -98,89 +100,92 @@ class QueriedPhraseContext extends TracksQueriedColumns 
implements TrackQueriedC
     return false;
   }
 
-  boolean isEvaluable(CubeQueryContext cubeQl, CandidateFact cfact) throws 
LensException {
+  /**
+   * @param cubeQl
+   * @param sc
+   * @return
+   * @throws LensException
+   */
+  public boolean isEvaluable(CubeQueryContext cubeQl, StorageCandidate sc) 
throws LensException {
     // all measures of the queried phrase should be present
     for (String msr : queriedMsrs) {
-      if (!checkForColumnExistsAndValidForRange(cfact, msr, cubeQl)) {
+      if (!checkForColumnExistsAndValidForRange(sc, msr, cubeQl)) {
         return false;
       }
     }
     // all expression columns should be evaluable
     for (String exprCol : queriedExprColumns) {
-      if (!cubeQl.getExprCtx().isEvaluable(exprCol, cfact)) {
-        log.info("expression {} is not evaluable in fact table:{}", expr, 
cfact);
+      if (!cubeQl.getExprCtx().isEvaluable(exprCol, sc)) {
+        log.info("expression {} is not evaluable in fact table:{}", expr, sc);
         return false;
       }
     }
     // all dim-attributes should be present.
     for (String col : queriedDimAttrs) {
-      if (!cfact.getColumns().contains(col.toLowerCase())) {
+      if (!sc.getColumns().contains(col.toLowerCase())) {
         // check if it available as reference
-        if (!cubeQl.getDeNormCtx().addRefUsage(cubeQl, cfact, col, 
cubeQl.getCube().getName())) {
-          log.info("column {} is not available in fact table:{} ", col, cfact);
+        if (!cubeQl.getDeNormCtx().addRefUsage(cubeQl, sc, col, 
cubeQl.getCube().getName())) {
+          log.info("column {} is not available in fact table:{} ", col, sc);
           return false;
         }
-      } else if (!isFactColumnValidForRange(cubeQl, cfact, col)) {
-        log.info("column {} is not available in range queried in fact {}", 
col, cfact);
+      } else if (!isFactColumnValidForRange(cubeQl, sc, col)) {
+        log.info("column {} is not available in range queried in fact {}", 
col, sc);
         return false;
       }
     }
     return true;
   }
 
-  public static boolean isColumnAvailableInRange(final TimeRange range, Date 
startTime, Date endTime) {
+  private static boolean isColumnAvailableInRange(final TimeRange range, Date 
startTime, Date endTime) {
     return (isColumnAvailableFrom(range.getFromDate(), startTime)
-      && isColumnAvailableTill(range.getToDate(), endTime));
+        && isColumnAvailableTill(range.getToDate(), endTime));
   }
 
-  public static boolean isColumnAvailableFrom(@NonNull final Date date, Date 
startTime) {
+  private static boolean isColumnAvailableFrom(@NonNull final Date date, Date 
startTime) {
     return (startTime == null) || date.equals(startTime) || 
date.after(startTime);
   }
 
-  public static boolean isColumnAvailableTill(@NonNull final Date date, Date 
endTime) {
+  private static boolean isColumnAvailableTill(@NonNull final Date date, Date 
endTime) {
     return (endTime == null) || date.equals(endTime) || date.before(endTime);
   }
 
-  public static boolean isFactColumnValidForRange(CubeQueryContext cubeql, 
CandidateTable cfact, String col) {
-    for(TimeRange range : cubeql.getTimeRanges()) {
-      if (!isColumnAvailableInRange(range, getFactColumnStartTime(cfact, col), 
getFactColumnEndTime(cfact, col))) {
+  public static boolean isFactColumnValidForRange(CubeQueryContext cubeql, 
StorageCandidate sc, String col) {
+    for (TimeRange range : cubeql.getTimeRanges()) {
+      if (!isColumnAvailableInRange(range, getFactColumnStartTime(sc, col), 
getFactColumnEndTime(sc, col))) {
         return false;
       }
     }
     return true;
   }
 
-  public static Date getFactColumnStartTime(CandidateTable table, String 
factCol) {
+  public static Date getFactColumnStartTime(StorageCandidate sc, String 
factCol) {
     Date startTime = null;
-    if (table instanceof CandidateFact) {
-      for (String key : ((CandidateFact) table).fact.getProperties().keySet()) 
{
-        if (key.contains(MetastoreConstants.FACT_COL_START_TIME_PFX)) {
-          String propCol = StringUtils.substringAfter(key, 
MetastoreConstants.FACT_COL_START_TIME_PFX);
-          if (factCol.equals(propCol)) {
-            startTime = ((CandidateFact) table).fact.getDateFromProperty(key, 
false, true);
-          }
+    for (String key : sc.getTable().getProperties().keySet()) {
+      if (key.contains(MetastoreConstants.FACT_COL_START_TIME_PFX)) {
+        String propCol = StringUtils.substringAfter(key, 
MetastoreConstants.FACT_COL_START_TIME_PFX);
+        if (factCol.equals(propCol)) {
+          startTime = sc.getTable().getDateFromProperty(key, false, true);
         }
       }
     }
     return startTime;
   }
 
-  public static Date getFactColumnEndTime(CandidateTable table, String 
factCol) {
+  public static Date getFactColumnEndTime(StorageCandidate sc, String factCol) 
{
     Date endTime = null;
-    if (table instanceof CandidateFact) {
-      for (String key : ((CandidateFact) table).fact.getProperties().keySet()) 
{
-        if (key.contains(MetastoreConstants.FACT_COL_END_TIME_PFX)) {
-          String propCol = StringUtils.substringAfter(key, 
MetastoreConstants.FACT_COL_END_TIME_PFX);
-          if (factCol.equals(propCol)) {
-            endTime = ((CandidateFact) table).fact.getDateFromProperty(key, 
false, true);
-          }
+    for (String key : sc.getTable().getProperties().keySet()) {
+      if (key.contains(MetastoreConstants.FACT_COL_END_TIME_PFX)) {
+        String propCol = StringUtils.substringAfter(key, 
MetastoreConstants.FACT_COL_END_TIME_PFX);
+        if (factCol.equals(propCol)) {
+          endTime = sc.getTable().getDateFromProperty(key, false, true);
         }
       }
     }
     return endTime;
   }
 
-  static boolean checkForColumnExistsAndValidForRange(CandidateTable table, 
String column, CubeQueryContext cubeql) {
-    return (table.getColumns().contains(column) &&  
isFactColumnValidForRange(cubeql, table, column));
+  static boolean checkForColumnExistsAndValidForRange(StorageCandidate sc, 
String column, CubeQueryContext cubeql) {
+    return (sc.getColumns().contains(column) && 
isFactColumnValidForRange(cubeql, sc, column));
   }
+
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/ae83caae/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
index 7298604..b94f131 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
@@ -83,4 +83,10 @@ public interface QueryAST {
   ASTNode getOrderByAST();
 
   void setOrderByAST(ASTNode node);
+
+  void setJoinAST(ASTNode node);
+
+  void setFromString(String fromString);
+  void setWhereString(String whereString);
+
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/ae83caae/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
index 62ceb12..77ebe82 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/SimpleHQLContext.java
@@ -18,14 +18,8 @@
  */
 package org.apache.lens.cube.parse;
 
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-
 import org.apache.lens.server.api.error.LensException;
 
-import org.apache.commons.lang.StringUtils;
-
 import lombok.Data;
 import lombok.extern.slf4j.Slf4j;
 
@@ -50,7 +44,7 @@ public abstract class SimpleHQLContext implements 
HQLContextInterface {
   }
 
   SimpleHQLContext(String select, String from, String where, String groupby, 
String orderby, String having,
-    Integer limit) {
+                   Integer limit) {
     this.select = select;
     this.from = from;
     this.where = where;
@@ -73,6 +67,7 @@ public abstract class SimpleHQLContext implements 
HQLContextInterface {
    * <p></p>
    * Leaving this empty implementation for the case of all expressions being 
passed in constructor. If other
    * constructors are used the missing expressions should be set here
+   *
    * @throws LensException
    */
   protected void setMissingExpressions() throws LensException {
@@ -80,57 +75,6 @@ public abstract class SimpleHQLContext implements 
HQLContextInterface {
 
   public String toHQL() throws LensException {
     setMissingExpressions();
-    String qfmt = getQueryFormat();
-    Object[] queryTreeStrings = getQueryTreeStrings();
-    if (log.isDebugEnabled()) {
-      log.debug("qfmt: {} Query strings: {}", qfmt, 
Arrays.toString(queryTreeStrings));
-    }
-    String baseQuery = String.format(qfmt, queryTreeStrings);
-    return baseQuery;
-  }
-
-  private String[] getQueryTreeStrings() throws LensException {
-    List<String> qstrs = new ArrayList<String>();
-    qstrs.add(select);
-    qstrs.add(from);
-    if (!StringUtils.isBlank(where)) {
-      qstrs.add(where);
-    }
-    if (!StringUtils.isBlank(groupby)) {
-      qstrs.add(groupby);
-    }
-    if (!StringUtils.isBlank(having)) {
-      qstrs.add(having);
-    }
-    if (!StringUtils.isBlank(orderby)) {
-      qstrs.add(orderby);
-    }
-    if (limit != null) {
-      qstrs.add(String.valueOf(limit));
-    }
-    return qstrs.toArray(new String[0]);
-  }
-
-  private final String baseQueryFormat = "SELECT %s FROM %s";
-
-  private String getQueryFormat() {
-    StringBuilder queryFormat = new StringBuilder();
-    queryFormat.append(baseQueryFormat);
-    if (!StringUtils.isBlank(where)) {
-      queryFormat.append(" WHERE %s");
-    }
-    if (!StringUtils.isBlank(groupby)) {
-      queryFormat.append(" GROUP BY %s");
-    }
-    if (!StringUtils.isBlank(having)) {
-      queryFormat.append(" HAVING %s");
-    }
-    if (!StringUtils.isBlank(orderby)) {
-      queryFormat.append(" ORDER BY %s");
-    }
-    if (limit != null) {
-      queryFormat.append(" LIMIT %s");
-    }
-    return queryFormat.toString();
+    return CandidateUtil.buildHQLString(select, from, where, groupby, orderby, 
having, limit);
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/ae83caae/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
deleted file mode 100644
index 9b48213..0000000
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/SingleFactMultiStorageHQLContext.java
+++ /dev/null
@@ -1,259 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.lens.cube.parse;
-
-import static 
org.apache.lens.cube.parse.CubeQueryConfUtil.DEFAULT_ENABLE_STORAGES_UNION;
-import static 
org.apache.lens.cube.parse.CubeQueryConfUtil.ENABLE_STORAGES_UNION;
-import static org.apache.lens.cube.parse.HQLParser.*;
-
-import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Map;
-
-import org.apache.lens.cube.error.LensCubeErrorCode;
-import org.apache.lens.cube.metadata.Dimension;
-import org.apache.lens.cube.metadata.MetastoreUtil;
-import org.apache.lens.server.api.error.LensException;
-
-import org.apache.hadoop.hive.ql.lib.Node;
-import org.apache.hadoop.hive.ql.parse.ASTNode;
-import org.apache.hadoop.hive.ql.parse.HiveParser;
-
-import org.antlr.runtime.CommonToken;
-
-public class SingleFactMultiStorageHQLContext extends UnionHQLContext {
-
-  private final QueryAST ast;
-
-  private Map<HashableASTNode, ASTNode> innerToOuterASTs = new HashMap<>();
-  private AliasDecider aliasDecider = new DefaultAliasDecider();
-
-  SingleFactMultiStorageHQLContext(CandidateFact fact, Map<Dimension, 
CandidateDim> dimsToQuery,
-    CubeQueryContext query, QueryAST ast)
-    throws LensException {
-    super(query, fact);
-    if (!query.getConf().getBoolean(ENABLE_STORAGES_UNION, 
DEFAULT_ENABLE_STORAGES_UNION)) {
-      throw new 
LensException(LensCubeErrorCode.STORAGE_UNION_DISABLED.getLensErrorInfo());
-    }
-    this.ast = ast;
-    processSelectAST();
-    processGroupByAST();
-    processHavingAST();
-    processOrderByAST();
-    processLimit();
-    setHqlContexts(getUnionContexts(fact, dimsToQuery, query, ast));
-  }
-
-  private void processSelectAST() {
-    ASTNode originalSelectAST = MetastoreUtil.copyAST(ast.getSelectAST());
-    ast.setSelectAST(new ASTNode(originalSelectAST.getToken()));
-    ASTNode outerSelectAST = processSelectExpression(originalSelectAST);
-    setSelect(getString(outerSelectAST));
-  }
-
-  private void processGroupByAST() {
-    if (ast.getGroupByAST() != null) {
-      setGroupby(getString(processExpression(ast.getGroupByAST())));
-    }
-  }
-
-  private void processHavingAST() throws LensException {
-    if (ast.getHavingAST() != null) {
-      setHaving(getString(processExpression(ast.getHavingAST())));
-      ast.setHavingAST(null);
-    }
-  }
-
-
-  private void processOrderByAST() {
-    if (ast.getOrderByAST() != null) {
-      setOrderby(getString(processOrderbyExpression(ast.getOrderByAST())));
-      ast.setOrderByAST(null);
-    }
-  }
-
-  private void processLimit() {
-    setLimit(ast.getLimitValue());
-    ast.setLimitValue(null);
-  }
-
-  private ASTNode processExpression(ASTNode astNode) {
-    if (astNode == null) {
-      return null;
-    }
-    ASTNode outerExpression = new ASTNode(astNode);
-    // iterate over all children of the ast and get outer ast corresponding to 
it.
-    for (Node child : astNode.getChildren()) {
-      outerExpression.addChild(getOuterAST((ASTNode)child));
-    }
-    return outerExpression;
-  }
-
-  private ASTNode processSelectExpression(ASTNode astNode) {
-    if (astNode == null) {
-      return null;
-    }
-    ASTNode outerExpression = new ASTNode(astNode);
-    // iterate over all children of the ast and get outer ast corresponding to 
it.
-    for (Node node : astNode.getChildren()) {
-      ASTNode child = (ASTNode)node;
-      ASTNode outerSelect = new ASTNode(child);
-      ASTNode selectExprAST = (ASTNode)child.getChild(0);
-      ASTNode outerAST = getOuterAST(selectExprAST);
-      outerSelect.addChild(outerAST);
-
-      // has an alias? add it
-      if (child.getChildCount() > 1) {
-        outerSelect.addChild(child.getChild(1));
-      }
-      outerExpression.addChild(outerSelect);
-    }
-    return outerExpression;
-  }
-
-  private ASTNode processOrderbyExpression(ASTNode astNode) {
-    if (astNode == null) {
-      return null;
-    }
-    ASTNode outerExpression = new ASTNode(astNode);
-    // sample orderby AST looks the following :
-    /*
-    TOK_ORDERBY
-   TOK_TABSORTCOLNAMEDESC
-      TOK_NULLS_LAST
-         .
-            TOK_TABLE_OR_COL
-               testcube
-            cityid
-   TOK_TABSORTCOLNAMEASC
-      TOK_NULLS_FIRST
-         .
-            TOK_TABLE_OR_COL
-               testcube
-            stateid
-   TOK_TABSORTCOLNAMEASC
-      TOK_NULLS_FIRST
-         .
-            TOK_TABLE_OR_COL
-               testcube
-            zipcode
-     */
-    for (Node node : astNode.getChildren()) {
-      ASTNode child = (ASTNode)node;
-      ASTNode outerOrderby = new ASTNode(child);
-      ASTNode tokNullsChild = (ASTNode) child.getChild(0);
-      ASTNode outerTokNullsChild = new ASTNode(tokNullsChild);
-      
outerTokNullsChild.addChild(getOuterAST((ASTNode)tokNullsChild.getChild(0)));
-      outerOrderby.addChild(outerTokNullsChild);
-      outerExpression.addChild(outerOrderby);
-    }
-    return outerExpression;
-  }
-  /*
-
-  Perform a DFS on the provided AST, and Create an AST of similar structure 
with changes specific to the
-  inner query - outer query dynamics. The resultant AST is supposed to be used 
in outer query.
-
-  Base cases:
-   1. ast is null => null
-   2. ast is aggregate_function(table.column) => add 
aggregate_function(table.column) to inner select expressions,
-            generate alias, return aggregate_function(cube.alias). Memoize the 
mapping
-            aggregate_function(table.column) => aggregate_function(cube.alias)
-            Assumption is aggregate_function is transitive i.e. f(a,b,c,d) = 
f(f(a,b), f(c,d)). SUM, MAX, MIN etc
-            are transitive, while AVG, COUNT etc are not. For non-transitive 
aggregate functions, the re-written
-            query will be incorrect.
-   3. ast has aggregates - iterate over children and add the non aggregate 
nodes as is and recursively get outer ast
-   for aggregate.
-   4. If no aggregates, simply select its alias in outer ast.
-   5. If given ast is memorized as mentioned in the above cases, return the 
mapping.
-   */
-  private ASTNode getOuterAST(ASTNode astNode) {
-    if (astNode == null) {
-      return null;
-    }
-    if (innerToOuterASTs.containsKey(new HashableASTNode(astNode))) {
-      return innerToOuterASTs.get(new HashableASTNode(astNode));
-    }
-    if (isAggregateAST(astNode)) {
-      return processAggregate(astNode);
-    } else if (hasAggregate(astNode)) {
-      ASTNode outerAST = new ASTNode(astNode);
-      for (Node child : astNode.getChildren()) {
-        ASTNode childAST = (ASTNode) child;
-        if (hasAggregate(childAST)) {
-          outerAST.addChild(getOuterAST(childAST));
-        } else {
-          outerAST.addChild(childAST);
-        }
-      }
-      return outerAST;
-    } else {
-      ASTNode innerSelectASTWithoutAlias = MetastoreUtil.copyAST(astNode);
-      ASTNode innerSelectExprAST = new ASTNode(new 
CommonToken(HiveParser.TOK_SELEXPR));
-      innerSelectExprAST.addChild(innerSelectASTWithoutAlias);
-      String alias = aliasDecider.decideAlias(astNode);
-      ASTNode aliasNode = new ASTNode(new CommonToken(Identifier, alias));
-      innerSelectExprAST.addChild(aliasNode);
-      addToInnerSelectAST(innerSelectExprAST);
-      ASTNode outerAST = getDotAST(query.getCube().getName(), alias);
-      innerToOuterASTs.put(new HashableASTNode(innerSelectASTWithoutAlias), 
outerAST);
-      return outerAST;
-    }
-  }
-
-  private ASTNode processAggregate(ASTNode astNode) {
-    ASTNode innerSelectASTWithoutAlias = MetastoreUtil.copyAST(astNode);
-    ASTNode innerSelectExprAST = new ASTNode(new 
CommonToken(HiveParser.TOK_SELEXPR));
-    innerSelectExprAST.addChild(innerSelectASTWithoutAlias);
-    String alias = aliasDecider.decideAlias(astNode);
-    ASTNode aliasNode = new ASTNode(new CommonToken(Identifier, alias));
-    innerSelectExprAST.addChild(aliasNode);
-    addToInnerSelectAST(innerSelectExprAST);
-    ASTNode dotAST = getDotAST(query.getCube().getName(), alias);
-    ASTNode outerAST = new ASTNode(new CommonToken(TOK_FUNCTION));
-    //TODO: take care or non-transitive aggregate functions
-    outerAST.addChild(new ASTNode(new CommonToken(Identifier, 
astNode.getChild(0).getText())));
-    outerAST.addChild(dotAST);
-    innerToOuterASTs.put(new HashableASTNode(innerSelectASTWithoutAlias), 
outerAST);
-    return outerAST;
-  }
-
-  private void addToInnerSelectAST(ASTNode selectExprAST) {
-    if (ast.getSelectAST() == null) {
-      ast.setSelectAST(new ASTNode(new CommonToken(TOK_SELECT)));
-    }
-    ast.getSelectAST().addChild(selectExprAST);
-  }
-
-  private static ArrayList<HQLContextInterface> getUnionContexts(CandidateFact 
fact, Map<Dimension, CandidateDim>
-    dimsToQuery, CubeQueryContext query, QueryAST ast)
-    throws LensException {
-    ArrayList<HQLContextInterface> contexts = new ArrayList<>();
-    String alias = query.getAliasForTableName(query.getCube().getName());
-    for (String storageTable : fact.getStorageTables()) {
-      SingleFactSingleStorageHQLContext ctx = new 
SingleFactSingleStorageHQLContext(fact, storageTable + " " + alias,
-        dimsToQuery, query, DefaultQueryAST.fromCandidateFact(fact, 
storageTable, ast));
-      contexts.add(ctx);
-    }
-    return contexts;
-  }
-}

Reply via email to