http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
index c63c4c0..47bce65 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryContext.java
@@ -29,8 +29,17 @@ import static org.apache.hadoop.hive.ql.parse.HiveParser.*;
 import static com.google.common.base.Preconditions.checkArgument;
 
 import java.io.ByteArrayOutputStream;
-import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
 import java.util.function.Predicate;
 
 import org.apache.lens.cube.error.LensCubeErrorCode;
@@ -66,6 +75,7 @@ public class CubeQueryContext extends TracksQueriedColumns 
implements QueryAST,
   static final String TIME_RANGE_FUNC = "time_range_in";
   public static final String NOW = "now";
   static final String DEFAULT_TABLE = "_default_";
+  @Getter
   private final ASTNode ast;
   @Getter
   private final QB qb;
@@ -179,7 +189,7 @@ public class CubeQueryContext extends TracksQueriedColumns 
implements QueryAST,
   @Setter
   private DenormalizationResolver.DenormalizationContext deNormCtx;
   @Getter
-  private PruneCauses<StorageCandidate>  storagePruningMsgs = new 
PruneCauses<>();
+  private PruneCauses<Candidate>  storagePruningMsgs = new PruneCauses<>();
   @Getter
   private Map<Dimension, PruneCauses<CubeDimensionTable>> dimPruningMsgs =
     new HashMap<Dimension, PruneCauses<CubeDimensionTable>>();
@@ -462,13 +472,17 @@ public class CubeQueryContext extends 
TracksQueriedColumns implements QueryAST,
   }
 
   void addCandidatePruningMsg(Candidate cand, CandidateTablePruneCause 
pruneCause) {
-    Set<StorageCandidate> scs = CandidateUtil.getStorageCandidates(cand);
-    for (StorageCandidate sc : scs) {
-      addStoragePruningMsg(sc, pruneCause);
+    if (cand instanceof SegmentationCandidate) {
+      addStoragePruningMsg(cand, pruneCause);
+    } else {
+      Set<StorageCandidate> scs = CandidateUtil.getStorageCandidates(cand);
+      for (StorageCandidate sc : scs) {
+        addStoragePruningMsg(sc, pruneCause);
+      }
     }
   }
 
-  void addStoragePruningMsg(StorageCandidate sc, CandidateTablePruneCause... 
factPruningMsgs) {
+  void addStoragePruningMsg(Candidate sc, CandidateTablePruneCause... 
factPruningMsgs) {
     for (CandidateTablePruneCause factPruningMsg: factPruningMsgs) {
       log.info("Pruning Storage {} with cause: {}", sc, factPruningMsg);
       storagePruningMsgs.addPruningMsg(sc, factPruningMsg);
@@ -627,14 +641,6 @@ public class CubeQueryContext extends TracksQueriedColumns 
implements QueryAST,
     }
   }
 
-  private void updateFromString(StorageCandidate sc, Map<Dimension, 
CandidateDim> dimsToQuery) throws LensException {
-    fromString = "%s"; // storage string is updated later
-    if (isAutoJoinResolved()) {
-      fromString =
-        getAutoJoinCtx().getFromString(fromString, sc, dimsToQuery.keySet(), 
dimsToQuery, this, this);
-    }
-  }
-
   public String getSelectString() {
     return HQLParser.getString(selectAST);
   }
@@ -705,7 +711,7 @@ public class CubeQueryContext extends TracksQueriedColumns 
implements QueryAST,
   String getQBFromString(StorageCandidate candidate, Map<Dimension, 
CandidateDim> dimsToQuery) throws LensException {
     String fromString;
     if (getJoinAST() == null) {
-      if (cube != null) {
+      if (candidate != null) {
         if (dimensions.size() > 0) {
           throw new 
LensException(LensCubeErrorCode.NO_JOIN_CONDITION_AVAILABLE.getLensErrorInfo());
         }
@@ -772,25 +778,13 @@ public class CubeQueryContext extends 
TracksQueriedColumns implements QueryAST,
 
   void setNonexistingParts(Map<String, Set<String>> nonExistingParts) throws 
LensException {
     if (!nonExistingParts.isEmpty()) {
-      ByteArrayOutputStream out = null;
-      String partsStr;
-      try {
-        ObjectMapper mapper = new ObjectMapper();
-        out = new ByteArrayOutputStream();
+      ObjectMapper mapper = new ObjectMapper();
+      try (ByteArrayOutputStream out = new ByteArrayOutputStream();){
         mapper.writeValue(out, nonExistingParts);
-        partsStr = out.toString("UTF-8");
+        conf.set(NON_EXISTING_PARTITIONS, out.toString("UTF-8"));
       } catch (Exception e) {
         throw new LensException("Error writing non existing parts", e);
-      } finally {
-        if (out != null) {
-          try {
-            out.close();
-          } catch (IOException e) {
-            throw new LensException(e);
-          }
-        }
       }
-      conf.set(NON_EXISTING_PARTITIONS, partsStr);
     } else {
       conf.unset(NON_EXISTING_PARTITIONS);
     }
@@ -800,7 +794,7 @@ public class CubeQueryContext extends TracksQueriedColumns 
implements QueryAST,
     return conf.get(NON_EXISTING_PARTITIONS);
   }
 
-  private Map<Dimension, CandidateDim> pickCandidateDimsToQuery(Set<Dimension> 
dimensions) throws LensException {
+  Map<Dimension, CandidateDim> pickCandidateDimsToQuery(Set<Dimension> 
dimensions) throws LensException {
     Map<Dimension, CandidateDim> dimsToQuery = new HashMap<Dimension, 
CandidateDim>();
     if (!dimensions.isEmpty()) {
       for (Dimension dim : dimensions) {
@@ -828,56 +822,39 @@ public class CubeQueryContext extends 
TracksQueriedColumns implements QueryAST,
     return dimsToQuery;
   }
 
-  private Candidate pickCandidateToQuery() throws LensException {
-    Candidate cand = null;
+  Candidate pickCandidateToQuery() throws LensException {
+    Candidate cand;
     if (hasCubeInQuery()) {
-      if (candidates.size() > 0) {
-        cand = candidates.iterator().next();
+      Iterator<Candidate> iter = candidates.iterator();
+      if (pickedCandidate == null && iter.hasNext()) {
+        cand = iter.next();
         log.info("Available Candidates:{}, picking up Candidate: {} for 
querying", candidates, cand);
-      } else {
+        pickedCandidate = cand;
+      }
+      if (pickedCandidate == null) {
         throwNoCandidateFactException();
       }
     }
-    return cand;
+    return pickedCandidate;
   }
-
   void throwNoCandidateFactException() throws LensException {
-    String reason = "";
-    if (!storagePruningMsgs.isEmpty()) {
-      ByteArrayOutputStream out = null;
-      try {
-        ObjectMapper mapper = new ObjectMapper();
-        out = new ByteArrayOutputStream();
-        mapper.writeValue(out, storagePruningMsgs.getJsonObject());
-        reason = out.toString("UTF-8");
-      } catch (Exception e) {
-        throw new LensException("Error writing fact pruning messages", e);
-      } finally {
-        if (out != null) {
-          try {
-            out.close();
-          } catch (IOException e) {
-            throw new LensException(e);
-          }
-        }
-      }
-    }
-    log.error("Query rewrite failed due to NO_CANDIDATE_FACT_AVAILABLE, Cause 
{}", storagePruningMsgs.toJsonObject());
+    log.error("Query rewrite failed due to NO_CANDIDATE_FACT_AVAILABLE, Cause 
{}", storagePruningMsgs.toJsonString());
     throw new NoCandidateFactAvailableException(this);
   }
-
-  private HQLContextInterface hqlContext;
+  @Getter
+  private QueryWriterContext queryWriterContext;
+  private QueryWriter queryWriter;
 
   @Getter
   private Candidate pickedCandidate;
   @Getter
   private Collection<CandidateDim> pickedDimTables;
 
-  private void addRangeClauses(StorageCandidate sc) throws LensException {
+  void addRangeClauses(StorageCandidateHQLContext sc) throws LensException {
     if (sc != null) {
       // resolve timerange positions and replace it by corresponding where 
clause
       for (TimeRange range : getTimeRanges()) {
-        String rangeWhere = CandidateUtil.getTimeRangeWhereClasue(rangeWriter, 
sc, range);
+        String rangeWhere = 
sc.getStorageCandidate().getTimeRangeWhereClasue(rangeWriter, range);
         if (!StringUtils.isBlank(rangeWhere)) {
           ASTNode updatedRangeAST = HQLParser.parseExpr(rangeWhere, conf);
           updateTimeRangeNode(sc.getQueryAst().getWhereAST(), 
range.getAstNode(), updatedRangeAST);
@@ -924,130 +901,52 @@ public class CubeQueryContext extends 
TracksQueriedColumns implements QueryAST,
     }
   }
 
-
-  public String toHQL() throws LensException {
-    Candidate cand = pickCandidateToQuery();
-    Map<Dimension, CandidateDim> dimsToQuery = 
pickCandidateDimsToQuery(dimensions);
-    Collection<StorageCandidate> scSet = new HashSet<>();
-    if (cand != null) {
-      scSet.addAll(CandidateUtil.getStorageCandidates(cand));
-    }
-
-    //Expand and get update period specific storage candidates if required.
-    scSet = expandStorageCandidates(scSet);
-
-    log.info("Candidate: {}, DimsToQuery: {}", cand, dimsToQuery);
-    if (autoJoinCtx != null) {
-      // prune join paths for picked fact and dimensions
-      autoJoinCtx.pruneAllPaths(cube, scSet, dimsToQuery);
-    }
-
-    Map<StorageCandidate, Set<Dimension>> factDimMap = new HashMap<>();
-    if (cand != null) {
-      // Set the default queryAST for StorageCandidate and copy child ASTs 
from cubeql.
-      // Later in the rewrite flow each Storage candidate will modify them 
accordingly.
-      for (StorageCandidate sc : scSet) {
-        sc.setQueryAst(DefaultQueryAST.fromStorageCandidate(sc, this));
-        CandidateUtil.copyASTs(this, sc.getQueryAst());
-        factDimMap.put(sc, new HashSet<>(dimsToQuery.keySet()));
-      }
-      for (StorageCandidate sc : scSet) {
-        addRangeClauses(sc);
-      }
-    }
-
-    // pick dimension tables required during expression expansion for the 
picked fact and dimensions
-    Set<Dimension> exprDimensions = new HashSet<>();
-    if (!scSet.isEmpty()) {
-      for (StorageCandidate sc : scSet) {
-        Set<Dimension> scExprDimTables = exprCtx.rewriteExprCtx(this, sc, 
dimsToQuery, sc.getQueryAst());
-        exprDimensions.addAll(scExprDimTables);
-        factDimMap.get(sc).addAll(scExprDimTables);
-      }
-    } else {
-      // dim only query
-      exprDimensions.addAll(exprCtx.rewriteExprCtx(this, null, dimsToQuery, 
this));
-    }
-    dimsToQuery.putAll(pickCandidateDimsToQuery(exprDimensions));
-    log.info("StorageCandidates: {}, DimsToQuery: {}", scSet, dimsToQuery);
-
-    // pick denorm tables for the picked fact and dimensions
-    Set<Dimension> denormTables = new HashSet<>();
-    if (!scSet.isEmpty()) {
-      for (StorageCandidate sc : scSet) {
-        Set<Dimension> scDenormTables = deNormCtx.rewriteDenormctx(this, sc, 
dimsToQuery, !scSet.isEmpty());
-        denormTables.addAll(scDenormTables);
-        factDimMap.get(sc).addAll(scDenormTables);
-      }
+  public QueryWriterContext getQueryWriterContext(Candidate cand, 
Map<Dimension, CandidateDim> dimsToQuery)
+    throws LensException {
+    if (cand == null) {
+      return new DimOnlyHQLContext(dimsToQuery, this);
     } else {
-      denormTables.addAll(deNormCtx.rewriteDenormctx(this, null, dimsToQuery, 
false));
-    }
-    dimsToQuery.putAll(pickCandidateDimsToQuery(denormTables));
-    log.info("StorageCandidates: {}, DimsToQuery: {}", scSet, dimsToQuery);
-    // Prune join paths once denorm tables are picked
-    if (autoJoinCtx != null) {
-      // prune join paths for picked fact and dimensions
-      autoJoinCtx.pruneAllPaths(cube, scSet, dimsToQuery);
-    }
-    if (autoJoinCtx != null) {
-      // add optional dims from Join resolver
-      Set<Dimension> joiningTables = new HashSet<>();
-      if (scSet != null && scSet.size() > 1) {
-        for (StorageCandidate sc : scSet) {
-          Set<Dimension> scJoiningTables = autoJoinCtx.pickOptionalTables(sc, 
factDimMap.get(sc), this);
-          factDimMap.get(sc).addAll(scJoiningTables);
-          joiningTables.addAll(scJoiningTables);
-        }
-      } else {
-        joiningTables.addAll(autoJoinCtx.pickOptionalTables(null, 
dimsToQuery.keySet(), this));
-      }
-      dimsToQuery.putAll(pickCandidateDimsToQuery(joiningTables));
+      return cand.toQueryWriterContext(dimsToQuery, this);
     }
-    log.info("Picked StorageCandidates: {} DimsToQuery: {}", scSet, 
dimsToQuery);
-    pickedDimTables = dimsToQuery.values();
-    pickedCandidate = cand;
+  }
+
+  QueryWriter getQueryWriter() throws LensException {
+    if (queryWriter == null) {
+      Candidate cand = pickCandidateToQuery();
+      Map<Dimension, CandidateDim> dimsToQuery = 
pickCandidateDimsToQuery(dimensions);
+      log.info("Candidate: {}, DimsToQuery: {}", cand, dimsToQuery);
+      queryWriterContext = getQueryWriterContext(cand, dimsToQuery);
 
-    //Set From string and time range clause
-    if (!scSet.isEmpty()) {
-      for (StorageCandidate sc : scSet) {
-        sc.updateFromString(this, factDimMap.get(sc), dimsToQuery);
+      if (cand != null && autoJoinCtx != null) {
+        // prune join paths for picked fact and dimensions
+        autoJoinCtx.pruneAllPaths(cube, cand.getColumns(), dimsToQuery);
       }
-    } else {
-      updateFromString(null, dimsToQuery);
-    }
-
-    //update dim filter with fact filter, set where string in sc
-    if (scSet.size() > 0) {
-      for (StorageCandidate sc : scSet) {
-        String qualifiedStorageTable = sc.getStorageTable();
-        String storageTable = 
qualifiedStorageTable.substring(qualifiedStorageTable.indexOf(".") + 1);
-        String where = getWhere(sc, autoJoinCtx,
-          sc.getQueryAst().getWhereAST(), 
getAliasForTableName(sc.getBaseTable().getName()),
-          shouldReplaceDimFilterWithFactFilter(), storageTable, dimsToQuery);
-        sc.setWhereString(where);
+      // pick dimension tables required during expression expansion for the 
picked fact and dimensions
+      queryWriterContext.addExpressionDims();
+      // pick denorm tables for the picked fact and dimensions
+      queryWriterContext.addDenormDims();
+      // Prune join paths once denorm tables are picked
+      if (cand != null && autoJoinCtx != null) {
+        // prune join paths for picked fact and dimensions
+        autoJoinCtx.pruneAllPaths(cube, cand.getColumns(), dimsToQuery);
       }
-    }
+      queryWriterContext.addAutoJoinDims();
+      pickedDimTables = dimsToQuery.values();
 
-    if (cand == null) {
-      hqlContext = new DimOnlyHQLContext(dimsToQuery, this, this);
-      return hqlContext.toHQL();
-    } else if (scSet.size() == 1) {
-      StorageCandidate sc = (StorageCandidate) scSet.iterator().next();
-      sc.updateAnswerableSelectColumns(this);
-      return getInsertClause() + sc.toHQL(factDimMap.get(sc));
-    } else {
-      UnionQueryWriter uqc = new UnionQueryWriter(scSet, this);
-      return getInsertClause() + uqc.toHQL(factDimMap);
+      //Set From string and time range clause
+      queryWriterContext.updateFromString();
+      //update dim filter with fact filter
+      queryWriterContext.updateDimFilterWithFactFilter();
+      queryWriter = queryWriterContext.toQueryWriter();
     }
+    return queryWriter;
   }
-
-  private Collection<StorageCandidate> 
expandStorageCandidates(Collection<StorageCandidate> scSet)
-    throws LensException {
-    Collection<StorageCandidate> expandedList = new 
ArrayList<StorageCandidate>();
-    for (StorageCandidate sc : scSet) {
-      expandedList.addAll(sc.splitAtUpdatePeriodLevelIfReq());
+  private String asHQL = null;
+  public String toHQL() throws LensException {
+    if (asHQL == null) {
+      asHQL = getQueryWriter().toHQL();
     }
-    return  expandedList;
+    return asHQL;
   }
 
   public ASTNode toAST(Context ctx) throws LensException {
@@ -1173,18 +1072,11 @@ public class CubeQueryContext extends 
TracksQueriedColumns implements QueryAST,
     return optionalDimensionMap.keySet();
   }
 
-  /**
-   * @return the hqlContext
-   */
-  HQLContextInterface getHqlContext() {
-    return hqlContext;
-  }
-
   public boolean shouldReplaceTimeDimWithPart() {
     return getConf().getBoolean(REPLACE_TIMEDIM_WITH_PART_COL, 
DEFAULT_REPLACE_TIMEDIM_WITH_PART_COL);
   }
 
-  private boolean shouldReplaceDimFilterWithFactFilter() {
+  boolean shouldReplaceDimFilterWithFactFilter() {
     return getConf().getBoolean(REWRITE_DIM_FILTER_TO_FACT_FILTER, 
DEFAULT_REWRITE_DIM_FILTER_TO_FACT_FILTER);
   }
 
@@ -1240,14 +1132,14 @@ public class CubeQueryContext extends 
TracksQueriedColumns implements QueryAST,
     return ImmutableSet.copyOf(this.queriedTimeDimCols);
   }
 
-  private String getWhere(StorageCandidate sc, AutoJoinContext autoJoinCtx,
-                          ASTNode node, String cubeAlias,
-                          boolean shouldReplaceDimFilter, String storageTable,
-                          Map<Dimension, CandidateDim> dimToQuery) throws 
LensException {
+  String getWhere(StorageCandidateHQLContext sc, AutoJoinContext autoJoinCtx,
+    ASTNode node, String cubeAlias,
+    boolean shouldReplaceDimFilter, String storageTable,
+    Map<Dimension, CandidateDim> dimToQuery) throws LensException {
     String whereString;
     if (autoJoinCtx != null && shouldReplaceDimFilter) {
       List<String> allfilters = new ArrayList<>();
-      getAllFilters(node, cubeAlias, allfilters, 
autoJoinCtx.getJoinClause(sc), dimToQuery);
+      getAllFilters(node, cubeAlias, allfilters, 
autoJoinCtx.getJoinClause(sc.getStorageCandidate()), dimToQuery);
       whereString = StringUtils.join(allfilters, " and ");
     } else {
       whereString = HQLParser.getString(sc.getQueryAst().getWhereAST());

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
index 6bee386..d064cdb 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/CubeQueryRewriter.java
@@ -21,7 +21,6 @@ package org.apache.lens.cube.parse;
 import static org.apache.lens.cube.error.LensCubeErrorCode.SYNTAX_ERROR;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.lens.server.api.error.LensException;
@@ -33,6 +32,10 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.parse.*;
 
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableList;
+import lombok.AccessLevel;
+import lombok.Getter;
 import lombok.extern.slf4j.Slf4j;
 
 /**
@@ -41,7 +44,9 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 public class CubeQueryRewriter {
   private final Configuration conf;
-  private final List<ContextRewriter> rewriters = new 
ArrayList<ContextRewriter>();
+  @VisibleForTesting
+  @Getter(AccessLevel.PACKAGE)
+  private final ImmutableList<ContextRewriter> rewriters;
   private final HiveConf hconf;
   private Context qlCtx = null;
   private boolean lightFactFirst;
@@ -56,7 +61,9 @@ public class CubeQueryRewriter {
     }
     lightFactFirst =
       conf.getBoolean(CubeQueryConfUtil.LIGHTEST_FACT_FIRST, 
CubeQueryConfUtil.DEFAULT_LIGHTEST_FACT_FIRST);
-    setupRewriters();
+    ImmutableList.Builder<ContextRewriter> builder = ImmutableList.builder();
+    setupRewriters(builder);
+    rewriters = builder.build();
   }
 
   /*
@@ -133,7 +140,7 @@ public class CubeQueryRewriter {
    * copied from original query and the expressions missing from this fact
    * removed.
    */
-  private void setupRewriters() {
+  private void setupRewriters(ImmutableList.Builder<ContextRewriter> 
rewriters) {
     // Resolve columns - the column alias and table alias
     rewriters.add(new ColumnResolver());
     // Rewrite base trees (groupby, having, orderby, limit) using aliases
@@ -183,13 +190,18 @@ public class CubeQueryRewriter {
 
     // Phase 2 of storageTableResolver: resolve storage table partitions.
     rewriters.add(storageTableResolver);
+    // For all segmentation candidates, for all segments, modify query ast and 
perform rewrites on inner cubes
+    // Also takes care of performing rewrites in segmentation candidates under 
a union/join candidate
+    rewriters.add(new SegmentationInnerRewriter(conf, hconf));
     // In case partial data is allowed (via 
lens.cube.query.fail.if.data.partial = false) and there are many
     // combinations with partial data, pick the one that covers the maximum 
part of time ranges(s) queried
     rewriters.add(new MaxCoveringFactResolver(conf));
     // Phase 3 of storageTableResolver:  resolve dimension tables and 
partitions.
     rewriters.add(storageTableResolver);
-    // Prune candidate tables for which denorm column references do not exist
+
     //TODO union: phase 2 of denormResolver needs to be moved before 
CoveringSetResolver.. check if this makes sense
+
+    // Prune candidate tables for which denorm column references do not exist
     rewriters.add(denormResolver);
     // Phase 2 of exprResolver : Prune candidate facts without any valid 
expressions
     rewriters.add(exprResolver);
@@ -203,6 +215,9 @@ public class CubeQueryRewriter {
     // queried will be picked. Rest of the combinations will be pruned
     rewriters.add(new LeastPartitionResolver());
     rewriters.add(new LightestDimensionResolver());
+    // Takes all candidates remaining till now, tries to explode that.
+    // see CandidateExploder#rewriteContext and Candidate#explode for further 
documentation
+    rewriters.add(new CandidateExploder());
   }
 
   public CubeQueryContext rewrite(ASTNode astnode) throws LensException {
@@ -239,7 +254,7 @@ public class CubeQueryRewriter {
     int i = 0;
     for (ContextRewriter rewriter : rewriters) {
       /*
-       * Adding iteration number as part of gauge name since some rewriters 
are have more than one phase, and having
+       * Adding iteration number as part of gauge name since some rewriters 
have more than one phase, and having
        * iter number gives the idea which iteration the rewriter was run
        */
       MethodMetricsContext mgauge = 
MethodMetricsFactory.createMethodGauge(ctx.getConf(), true,
@@ -251,10 +266,6 @@ public class CubeQueryRewriter {
     }
   }
 
-  public Context getQLContext() {
-    return qlCtx;
-  }
-
   public void clear() {
     try {
       if (qlCtx != null) {

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
index 29da0a2..9cf37e6 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DefaultQueryAST.java
@@ -61,12 +61,14 @@ public class DefaultQueryAST implements QueryAST {
     return null;
   }
 
-  public static DefaultQueryAST fromStorageCandidate(StorageCandidate sc, 
QueryAST ast) throws
-      LensException {
+  public static DefaultQueryAST fromStorageCandidate(DimHQLContext sc) throws 
LensException {
+    return fromStorageCandidate(sc, sc.getQueryAst());
+  }
+  public static DefaultQueryAST fromStorageCandidate(DimHQLContext sc, 
QueryAST ast) {
     return new DefaultQueryAST(ast.getSelectAST(),
         null,
         ast.getGroupByAST(), ast.getHavingAST(), ast.getJoinAST(), 
ast.getOrderByAST(), ast.getLimitValue(),
         ast.getFromString(),
-        sc.getWhereString());
+        sc != null ? sc.getWhere() : null);
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
index 30fa873..881afc8 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/DenormalizationResolver.java
@@ -140,14 +140,14 @@ public class DenormalizationResolver implements 
ContextRewriter {
     }
 
     Set<Dimension> rewriteDenormctx(CubeQueryContext cubeql,
-      StorageCandidate sc, Map<Dimension, CandidateDim> dimsToQuery, boolean 
replaceFact) throws LensException {
+      DimHQLContext sc, Map<Dimension, CandidateDim> dimsToQuery, boolean 
replaceFact) throws LensException {
       Set<Dimension> refTbls = new HashSet<>();
       log.info("Doing denorm changes for fact :{}", sc);
 
       if (!tableToRefCols.isEmpty()) {
         // pick referenced columns for fact
-        if (sc != null) {
-          pickColumnsForTable(cubeql, sc.getStorageTable());
+        if (sc.getStorageCandidate() != null) {
+          pickColumnsForTable(cubeql, 
sc.getStorageCandidate().getStorageTable());
         }
         // pick referenced columns for dimensions
         if (dimsToQuery != null) {
@@ -159,7 +159,7 @@ public class DenormalizationResolver implements 
ContextRewriter {
         replaceReferencedColumns(cubeql, sc, replaceFact);
         // Add the picked references to dimsToQuery
         for (PickedReference picked : pickedRefs) {
-          if (isPickedFor(picked, sc, dimsToQuery)) {
+          if (isPickedFor(picked, sc.getStorageCandidate(), dimsToQuery)) {
             refTbls.add((Dimension) 
cubeql.getCubeTableForAlias(picked.getChainRef().getChainName()));
             cubeql.addColumnsQueried(picked.getChainRef().getChainName(), 
picked.getChainRef().getRefColumn());
           }
@@ -264,11 +264,12 @@ public class DenormalizationResolver implements 
ContextRewriter {
       }
     }
 
-    private void replaceReferencedColumns(CubeQueryContext cubeql, 
StorageCandidate sc, boolean replaceFact)
-        throws LensException {
+    private void replaceReferencedColumns(CubeQueryContext cubeql, 
DimHQLContext sc, boolean replaceFact)
+      throws LensException {
       QueryAST ast = cubeql;
-      boolean factRefExists = sc != null && 
tableToRefCols.get(sc.getStorageTable()) != null
-          && !tableToRefCols.get(sc.getStorageTable()).isEmpty();
+      boolean factRefExists = sc.getStorageCandidate() != null
+        && tableToRefCols.get(sc.getStorageCandidate().getStorageTable()) != 
null
+        && 
!tableToRefCols.get(sc.getStorageCandidate().getStorageTable()).isEmpty();
       if (replaceFact && factRefExists) {
         ast = sc.getQueryAst();
       }

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
index 95d6572..43510dd 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimHQLContext.java
@@ -18,7 +18,6 @@
  */
 package org.apache.lens.cube.parse;
 
-import static org.apache.lens.cube.parse.StorageUtil.joinWithAnd;
 
 import java.util.Map;
 import java.util.Set;
@@ -28,53 +27,35 @@ import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 
+import lombok.Getter;
+
 /**
  * Dimension HQLContext.
  * <p></p>
  * Contains all the dimensions queried and their candidate dim tables Update 
where string with storage filters added
  * dimensions queried.
  */
-abstract class DimHQLContext extends SimpleHQLContext {
-
-  private final Map<Dimension, CandidateDim> dimsToQuery;
-  private final Set<Dimension> queriedDims;
-  private String where;
-  protected final CubeQueryContext query;
-  private final String astFromString;
+public abstract class DimHQLContext extends SimpleHQLContext implements 
QueryWriterContext {
+  @Getter
+  protected final CubeQueryContext cubeQueryContext;
+  @Getter
+  protected final Map<Dimension, CandidateDim> dimsToQuery;
 
-  public CubeQueryContext getQuery() {
-    return query;
-  }
-  DimHQLContext(CubeQueryContext query, Map<Dimension, CandidateDim> 
dimsToQuery,
-    Set<Dimension> queriedDims, QueryAST ast) throws LensException {
-    super(ast.getSelectString(), ast.getGroupByString(), 
ast.getOrderByString(),
-        ast.getHavingString(), ast.getLimitValue());
-    this.query = query;
+  DimHQLContext(CubeQueryContext query, final Map<Dimension, CandidateDim> 
dimsToQuery, QueryAST queryAST) {
+    super(queryAST);
+    this.cubeQueryContext = query;
     this.dimsToQuery = dimsToQuery;
-    this.where = ast.getWhereString();
-    this.queriedDims = queriedDims;
-    this.astFromString = ast.getFromString();
   }
 
-  protected void setMissingExpressions() throws LensException {
-    setFrom(String.format(astFromString, getFromTable()));
-    setWhere(joinWithAnd(
-      genWhereClauseWithDimPartitions(where), getQuery().getConf().getBoolean(
-        CubeQueryConfUtil.REPLACE_TIMEDIM_WITH_PART_COL, 
CubeQueryConfUtil.DEFAULT_REPLACE_TIMEDIM_WITH_PART_COL)
-        ? getPostSelectionWhereClause() : null));
-  }
+  public abstract StorageCandidate getStorageCandidate();
 
-  protected String getPostSelectionWhereClause() throws LensException {
-    return null;
+  private Set<Dimension> getQueriedDims() {
+    return dimsToQuery.keySet();
   }
 
   protected abstract String getFromTable() throws LensException;
 
-  public Map<Dimension, CandidateDim> getDimsToQuery() {
-    return dimsToQuery;
-  }
-
-  private String genWhereClauseWithDimPartitions(String originalWhere) {
+  String genWhereClauseWithDimPartitions(String originalWhere) {
     StringBuilder whereBuf;
     if (originalWhere != null) {
       whereBuf = new StringBuilder(originalWhere);
@@ -83,11 +64,12 @@ abstract class DimHQLContext extends SimpleHQLContext {
     }
 
     // add where clause for all dimensions
-    if (queriedDims != null) {
+    if (getCubeQueryContext() != null) {
       boolean added = (originalWhere != null);
-      for (Dimension dim : queriedDims) {
-        CandidateDim cdim = dimsToQuery.get(dim);
-        String alias = query.getAliasForTableName(dim.getName());
+      for (Map.Entry<Dimension, CandidateDim> dimensionCandidateDimEntry : 
getDimsToQuery().entrySet()) {
+        Dimension dim = dimensionCandidateDimEntry.getKey();
+        CandidateDim cdim = dimensionCandidateDimEntry.getValue();
+        String alias = 
getCubeQueryContext().getAliasForTableName(dim.getName());
         if (!cdim.isWhereClauseAdded() && 
!StringUtils.isBlank(cdim.getWhereClause())) {
           appendWhereClause(whereBuf, StorageUtil.getWhereClause(cdim, alias), 
added);
           added = true;
@@ -113,4 +95,35 @@ abstract class DimHQLContext extends SimpleHQLContext {
       filterCondition.append(")");
     }
   }
+
+  @Override
+  public void addAutoJoinDims() throws LensException {
+    if (getCubeQueryContext().isAutoJoinResolved()) {
+      Set<Dimension> autoJoinDims = 
getCubeQueryContext().getAutoJoinCtx().pickOptionalTables(this, 
getQueriedDims(),
+        getCubeQueryContext());
+      Map<Dimension, CandidateDim> autoJoinDimsToQuery = 
getCubeQueryContext().pickCandidateDimsToQuery(autoJoinDims);
+      dimsToQuery.putAll(autoJoinDimsToQuery);
+    }
+  }
+
+  @Override
+  public void addExpressionDims() throws LensException {
+    Set<Dimension> expressionDims = 
getCubeQueryContext().getExprCtx().rewriteExprCtx(getCubeQueryContext(), this,
+      getDimsToQuery());
+    Map<Dimension, CandidateDim> expressionDimsToQuery = 
getCubeQueryContext().pickCandidateDimsToQuery(expressionDims);
+    dimsToQuery.putAll(expressionDimsToQuery);
+  }
+
+  @Override
+  public void addDenormDims() throws LensException {
+    Set<Dimension> denormDims = 
getCubeQueryContext().getDeNormCtx().rewriteDenormctx(getCubeQueryContext(), 
this,
+      getDimsToQuery(), getStorageCandidate() != null);
+    Map<Dimension, CandidateDim> denormDimsToQuery = 
getCubeQueryContext().pickCandidateDimsToQuery(denormDims);
+    dimsToQuery.putAll(denormDimsToQuery);
+  }
+
+  @Override
+  public QueryWriter toQueryWriter() throws LensException {
+    return this;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
index 6f6572e..27b17e1 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/DimOnlyHQLContext.java
@@ -18,8 +18,8 @@
  */
 package org.apache.lens.cube.parse;
 
+
 import java.util.Map;
-import java.util.Set;
 
 import org.apache.lens.cube.metadata.Dimension;
 import org.apache.lens.server.api.error.LensException;
@@ -30,29 +30,60 @@ import org.apache.lens.server.api.error.LensException;
  * <p/>
  * Updates from string with join clause expanded
  */
-class DimOnlyHQLContext extends DimHQLContext {
+public class DimOnlyHQLContext extends DimHQLContext {
 
-  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext 
query, QueryAST ast)
-    throws LensException {
-    this(dimsToQuery, dimsToQuery.keySet(), query, ast);
-  }
 
-  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, Set<Dimension> 
dimsQueried,
-    CubeQueryContext query, QueryAST ast)
+  DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, CubeQueryContext 
query) throws LensException {
+    this(dimsToQuery, query, query);
+  }
+  private DimOnlyHQLContext(Map<Dimension, CandidateDim> dimsToQuery, 
CubeQueryContext query, QueryAST ast)
     throws LensException {
-    super(query, dimsToQuery, dimsQueried, ast);
+    super(query, dimsToQuery, ast);
   }
 
-  public String toHQL() throws LensException {
-    return query.getInsertClause() + super.toHQL();
+  @Override
+  public StorageCandidate getStorageCandidate() {
+    return null;
   }
 
   protected String getFromTable() throws LensException {
-    if (query.isAutoJoinResolved()) {
-      return 
getDimsToQuery().get(query.getAutoJoinCtx().getAutoJoinTarget()).getStorageString(
-        
query.getAliasForTableName(query.getAutoJoinCtx().getAutoJoinTarget().getName()));
+    if (getCubeQueryContext().isAutoJoinResolved()) {
+      return 
getDimsToQuery().get(getCubeQueryContext().getAutoJoinCtx().getAutoJoinTarget())
+        .getStorageString(getCubeQueryContext().getAliasForTableName(
+          getCubeQueryContext().getAutoJoinCtx().getAutoJoinTarget().getName())
+        );
     } else {
-      return query.getQBFromString(null, getDimsToQuery());
+      return getCubeQueryContext().getQBFromString(null, getDimsToQuery());
+    }
+  }
+
+  @Override
+  public void updateDimFilterWithFactFilter() throws LensException {
+    //void
+  }
+
+  @Override
+  public void updateFromString() throws LensException {
+    String fromString = "%s"; // storage string is updated later
+    if (getCubeQueryContext().isAutoJoinResolved()) {
+      setFrom(
+        getCubeQueryContext().getAutoJoinCtx().getFromString(
+          fromString, this, getDimsToQuery(), getCubeQueryContext()
+        )
+      );
+    }
+  }
+
+  @Override
+  protected void setMissingExpressions() throws LensException {
+    if (getFrom() == null) {
+      setFrom("%s");
+    }
+    setFrom(String.format(getFrom(), getFromTable()));
+    if (getWhere() == null) {
+      setWhere(queryAst.getWhereString());
     }
+    setWhere(genWhereClauseWithDimPartitions(getWhere()));
+    setPrefix(getCubeQueryContext().getInsertClause());
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
index 926a4d0..f86a84a 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/ExpressionResolver.java
@@ -378,15 +378,14 @@ class ExpressionResolver implements ContextRewriter {
       return ec.isEvaluable(cTable);
     }
 
-    Set<Dimension> rewriteExprCtx(CubeQueryContext cubeql, StorageCandidate sc,
-        Map<Dimension, CandidateDim> dimsToQuery,
-      QueryAST queryAST) throws LensException {
+    Set<Dimension> rewriteExprCtx(CubeQueryContext cubeql, DimHQLContext sc, 
Map<Dimension, CandidateDim> dimsToQuery)
+      throws LensException {
       Set<Dimension> exprDims = new HashSet<Dimension>();
       log.info("Picking expressions for candidate {} ", sc);
       if (!allExprsQueried.isEmpty()) {
         // pick expressions for fact
-        if (sc != null) {
-          pickExpressionsForTable(sc);
+        if (sc.getStorageCandidate() != null) {
+          pickExpressionsForTable(sc.getStorageCandidate());
         }
         // pick expressions for dimensions
         if (dimsToQuery != null && !dimsToQuery.isEmpty()) {
@@ -399,12 +398,12 @@ class ExpressionResolver implements ContextRewriter {
           for (PickedExpression pe : peSet) {
             exprDims.addAll(pe.pickedCtx.exprDims);
             pe.initRewrittenAST(pe.pickedCtx.deNormCtx.hasReferences());
-            
exprDims.addAll(pe.pickedCtx.deNormCtx.rewriteDenormctxInExpression(cubeql, sc, 
dimsToQuery,
-              pe.getRewrittenAST()));
+            
exprDims.addAll(pe.pickedCtx.deNormCtx.rewriteDenormctxInExpression(cubeql,
+              sc.getStorageCandidate(), dimsToQuery, pe.getRewrittenAST()));
           }
         }
         // Replace picked expressions in all the base trees
-        replacePickedExpressions(sc, queryAST);
+        replacePickedExpressions(sc);
       }
 
       pickedExpressions.clear();
@@ -412,10 +411,11 @@ class ExpressionResolver implements ContextRewriter {
       return exprDims;
     }
 
-    private void replacePickedExpressions(StorageCandidate sc, QueryAST 
queryAST)
+    private void replacePickedExpressions(DimHQLContext sc)
       throws LensException {
+      QueryAST queryAST = sc.getQueryAst();
       replaceAST(cubeql, queryAST.getSelectAST());
-      if (sc != null) {
+      if (sc.getStorageCandidate() != null) {
         replaceAST(cubeql, sc.getQueryAst().getWhereAST());
       } else {
         replaceAST(cubeql, queryAST.getWhereAST());
@@ -423,8 +423,12 @@ class ExpressionResolver implements ContextRewriter {
       replaceAST(cubeql, queryAST.getJoinAST());
       replaceAST(cubeql, queryAST.getGroupByAST());
       // Having AST is resolved by each fact, so that all facts can expand 
their expressions.
-      // Having ast is not copied now, it's maintained in cubeql, each fact 
processes that serially.
-      replaceAST(cubeql, cubeql.getHavingAST());
+      // Having ast is not copied now, it's maintained in cubeQueryContext, 
each fact processes that serially.
+      if (queryAST.getHavingAST() != null) {
+        replaceAST(cubeql, queryAST.getHavingAST());
+      } else {
+        replaceAST(cubeql, cubeql.getHavingAST());
+      }
       replaceAST(cubeql, queryAST.getOrderByAST());
     }
 
@@ -626,28 +630,13 @@ class ExpressionResolver implements ContextRewriter {
           Set<ExpressionContext> ecSet = ecEntry.getValue();
           for (ExpressionContext ec : ecSet) {
             if (ec.getSrcTable().getName().equals(cubeql.getCube().getName())) 
{
-              if (cubeql.getQueriedExprsWithMeasures().contains(expr)) {
-                for (Iterator<Candidate> sItr = 
cubeql.getCandidates().iterator(); sItr.hasNext();) {
-                  Candidate cand = sItr.next();
-                  if (!cand.isExpressionEvaluable(ec)) {
-                    log.info("Not considering Candidate :{} as {} is not 
evaluable", cand, ec.exprCol.getName());
-                    sItr.remove();
-                    cubeql.addCandidatePruningMsg(cand,
-                        
CandidateTablePruneCause.expressionNotEvaluable(ec.exprCol.getName()));
-                  }
-                }
-              } else {
-                // prune dimension only expressions
-                Set<StorageCandidate> storageCandidates = 
CandidateUtil.getStorageCandidates(cubeql.getCandidates());
-                for (StorageCandidate sc : storageCandidates) {
-                  if (!sc.isExpressionEvaluable(ec)) {
-                    Collection<Candidate> prunedCandidates =
-                        CandidateUtil.filterCandidates(cubeql.getCandidates(), 
sc);
-                    log.info("Not considering candidate(s) :{} as expr :{} in 
storage :{} is not evaluable",
-                        prunedCandidates, ec.exprCol.getName(), sc);
-                    cubeql.addStoragePruningMsg(sc,
-                        
CandidateTablePruneCause.expressionNotEvaluable(ec.exprCol.getName()));
-                  }
+              for (Iterator<Candidate> sItr = 
cubeql.getCandidates().iterator(); sItr.hasNext();) {
+                Candidate cand = sItr.next();
+                if (!cand.isExpressionEvaluable(ec)) {
+                  log.info("Not considering Candidate :{} as {} is not 
evaluable", cand, ec.exprCol.getName());
+                  sItr.remove();
+                  cubeql.addCandidatePruningMsg(cand,
+                      
CandidateTablePruneCause.expressionNotEvaluable(ec.exprCol.getName()));
                 }
               }
             }

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
index 1b30c0b..211f6ac 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/GroupbyResolver.java
@@ -166,7 +166,7 @@ class GroupbyResolver implements ContextRewriter {
     sel.setFinalAlias(!StringUtils.isBlank(selectFinalAlias) ? "`" + 
selectFinalAlias + "`" : selectAlias);
     sel.setActualAlias(alias != null ? alias.toLowerCase() : null);
     cubeql.getSelectPhrases().add(exprInd, sel);
-    //cubeql.addSelectPhrase(sel);
+    //cubeQueryContext.addSelectPhrase(sel);
   }
 
   private void addChildAtIndex(int index, ASTNode parent, ASTNode child) {

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLContextInterface.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLContextInterface.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLContextInterface.java
deleted file mode 100644
index 78d448a..0000000
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLContextInterface.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.lens.cube.parse;
-
-import org.apache.lens.server.api.error.LensException;
-
-
-/**
- * HQL context holding the ql expressions
- */
-public interface HQLContextInterface {
-
-  /**
-   * Get the HQL query.
-   *
-   * @return query string
-   * @throws LensException
-   */
-  String toHQL() throws LensException;
-
-  /**
-   * Get select expression.
-   *
-   * @return select
-   */
-  String getSelect();
-
-  /**
-   * Get from string
-   *
-   * @return from
-   */
-  String getFrom();
-
-  /**
-   * Get where string
-   *
-   * @return where
-   */
-  String getWhere();
-
-  /**
-   * Get groupby string
-   *
-   * @return groupby
-   */
-  String getGroupby();
-
-  /**
-   * Get having string
-   *
-   * @return having
-   */
-  String getHaving();
-
-  /**
-   * Get orderby string
-   *
-   * @return orderby
-   */
-  String getOrderby();
-
-  /**
-   * Get limit
-   *
-   * @return limit
-   */
-  Integer getLimit();
-}

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
index 8a70535..38a061e 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/HQLParser.java
@@ -721,11 +721,6 @@ public final class HQLParser {
     }
   }
 
-  public static void main(String[] args) throws Exception {
-    ASTNode ast = parseHQL("select * from default_table ", new HiveConf());
-
-    printAST(getHiveTokenMapping(), ast, 0, 0);
-  }
 
   public static String getString(ASTNode tree, AppendMode appendMode) {
     StringBuilder buf = new StringBuilder();
@@ -909,23 +904,10 @@ public final class HQLParser {
   }
   @Data
   public static class HashableASTNode {
-    private ASTNode ast;
+    private final ASTNode ast;
     private int hashCode = -1;
     private boolean hashCodeComputed = false;
 
-    public HashableASTNode(ASTNode ast) {
-      this.ast = ast;
-    }
-
-    public void setAST(ASTNode ast) {
-      this.ast = ast;
-      hashCodeComputed = false;
-    }
-
-    public ASTNode getAST() {
-      return ast;
-    }
-
     @Override
     public int hashCode() {
       if (!hashCodeComputed) {
@@ -937,8 +919,8 @@ public final class HQLParser {
 
     @Override
     public boolean equals(Object o) {
-      return o instanceof HashableASTNode && this.hashCode() == o.hashCode() 
&& getString(this.getAST())
-        .trim().equalsIgnoreCase(getString(((HashableASTNode) 
o).getAST()).trim());
+      return o instanceof HashableASTNode && this.hashCode() == o.hashCode() 
&& getString(this.getAst())
+        .trim().equalsIgnoreCase(getString(((HashableASTNode) 
o).getAst()).trim());
     }
   }
 
@@ -953,4 +935,61 @@ public final class HQLParser {
       return s;
     }
   }
+  static ASTNode trimHavingAst(ASTNode astNode, Collection<String> columns) {
+    if (astNode != null) {
+      if (astNode.getParent() != null && astNode.getParent().getType() == DOT 
&& astNode.getChildIndex() == 1) {
+        return columns.contains(astNode.getText()) ? astNode : null;
+      }
+      for (int i = astNode.getChildCount() - 1; i >= 0; i--) {
+        ASTNode replacement = trimHavingAst((ASTNode) astNode.getChild(i), 
columns);
+        if (replacement == null) {
+          astNode.deleteChild(i);
+        } else {
+          astNode.setChild(i, replacement);
+        }
+      }
+      if (isAggregateAST(astNode) || 
BINARY_OPERATORS.contains(astNode.getType())) {
+        if (astNode.getChildCount() == 1) {
+          ASTNode child = (ASTNode) astNode.getChild(0);
+          if (!BINARY_OPERATORS.contains(child.getType())) {
+            return null;
+          } else {
+            return child;
+          }
+        }
+      }
+    }
+    return astNode;
+  }
+  static ASTNode trimOrderByAst(ASTNode astNode, Collection<String> columns) {
+    if (astNode != null) {
+      if (astNode.getParent() != null && astNode.getParent().getType() == DOT 
&& astNode.getChildIndex() == 1) {
+        return columns.contains(astNode.getText()) ? astNode : null;
+      }
+      for (int i = astNode.getChildCount() - 1; i >= 0; i--) {
+        ASTNode replacement = trimOrderByAst((ASTNode) astNode.getChild(i), 
columns);
+        if (replacement == null) {
+          astNode.deleteChild(i);
+        } else {
+          astNode.setChild(i, replacement);
+        }
+      }
+      switch (astNode.getType()) {
+      case DOT:
+        if (astNode.getChildCount() < 2) {
+          return null;
+        }
+        break;
+      case TOK_TABSORTCOLNAMEASC:
+      case TOK_TABSORTCOLNAMEDESC:
+      case TOK_NULLS_FIRST:
+      case TOK_NULLS_LAST:
+        if (astNode.getChildCount() == 0) {
+          return null;
+        }
+        break;
+      }
+    }
+    return astNode;
+  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
index 52085ea..038d689 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinCandidate.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,12 +18,20 @@
  */
 package org.apache.lens.cube.parse;
 
+import static java.lang.Long.MAX_VALUE;
+import static java.lang.Long.MIN_VALUE;
+import static java.util.stream.Collectors.joining;
+import static java.util.stream.Collectors.toSet;
+
 import java.util.*;
 
 import org.apache.lens.cube.metadata.FactPartition;
 import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 
+import com.google.common.collect.Lists;
+import lombok.Getter;
+
 /**
  * Represents a join of two candidates
  */
@@ -32,69 +40,54 @@ public class JoinCandidate implements Candidate {
   /**
    * Child candidates that will participate in the join
    */
-  private Candidate childCandidate1;
-  private Candidate childCandidate2;
+  @Getter
+  private List<Candidate> children;
   private String toStr;
-  private QueryAST queryAST;
-  private CubeQueryContext cubeql;
+  @Getter
+  private CubeQueryContext cubeQueryContext;
 
   public JoinCandidate(Candidate childCandidate1, Candidate childCandidate2, 
CubeQueryContext cubeql) {
-    this.childCandidate1 = childCandidate1;
-    this.childCandidate2 = childCandidate2;
-    this.cubeql = cubeql;
+    children = Lists.newArrayList(childCandidate1, childCandidate2);
+    this.cubeQueryContext = cubeql;
   }
 
   @Override
   public Collection<String> getColumns() {
     Set<String> columns = new HashSet<>();
-    columns.addAll(childCandidate1.getColumns());
-    columns.addAll(childCandidate2.getColumns());
+    for (Candidate child : children) {
+      columns.addAll(child.getColumns());
+    }
     return columns;
   }
 
   @Override
   public Date getStartTime() {
-    return childCandidate1.getStartTime().after(childCandidate2.getStartTime())
-        ? childCandidate1.getStartTime() : childCandidate2.getStartTime();
+    return 
children.stream().map(Candidate::getStartTime).max(Comparator.naturalOrder()).orElse(new
 Date(MIN_VALUE));
   }
 
   @Override
   public Date getEndTime() {
-    return childCandidate1.getEndTime().before(childCandidate2.getEndTime())
-        ? childCandidate1.getEndTime() : childCandidate2.getEndTime();
+    return 
children.stream().map(Candidate::getEndTime).min(Comparator.naturalOrder()).orElse(new
 Date(MAX_VALUE));
   }
 
   @Override
   public double getCost() {
-    return childCandidate1.getCost() + childCandidate2.getCost();
+    return children.stream().mapToDouble(Candidate::getCost).sum();
   }
 
   @Override
-  public boolean contains(Candidate candidate) {
-    if (this.equals(candidate)) {
-      return true;
-    } else {
-      return childCandidate1.contains(candidate) || 
childCandidate2.contains(candidate);
-    }
+  public boolean contains(final Candidate candidate) {
+    return this.equals(candidate) || children.stream().anyMatch(c -> 
c.contains(candidate));
   }
-
-  @Override
-  public Collection<Candidate> getChildren() {
-    ArrayList<Candidate> joinCandidates = new ArrayList<>();
-    joinCandidates.add(childCandidate1);
-    joinCandidates.add(childCandidate2);
-    return joinCandidates;
-  }
-
-  /**
-   * @param timeRange
-   * @return
-   */
   @Override
   public boolean evaluateCompleteness(TimeRange timeRange, TimeRange 
parentTimeRange, boolean failOnPartialData)
     throws LensException {
-    return this.childCandidate1.evaluateCompleteness(timeRange, 
parentTimeRange, failOnPartialData)
-        && this.childCandidate2.evaluateCompleteness(timeRange, 
parentTimeRange, failOnPartialData);
+    for (Candidate child : children) {
+      if (!child.evaluateCompleteness(timeRange, parentTimeRange, 
failOnPartialData)) {
+        return false;
+      }
+    }
+    return true;
   }
 
   /**
@@ -102,30 +95,71 @@ public class JoinCandidate implements Candidate {
    */
   @Override
   public Set<FactPartition> getParticipatingPartitions() {
-    Set<FactPartition> factPartitionsSet = new HashSet<>();
-    factPartitionsSet.addAll(childCandidate1.getParticipatingPartitions());
-    factPartitionsSet.addAll(childCandidate2.getParticipatingPartitions());
-    return factPartitionsSet;
+    return 
children.stream().map(Candidate::getParticipatingPartitions).flatMap(Collection::stream).collect(toSet());
   }
 
   @Override
   public boolean isExpressionEvaluable(ExpressionResolver.ExpressionContext 
expr) {
-    return childCandidate1.isExpressionEvaluable(expr) || 
childCandidate2.isExpressionEvaluable(expr);
+    // implied that expression always has measure
+    return children.stream().anyMatch(x->x.isExpressionEvaluable(expr));
+  }
+
+  @Override
+  public boolean isExpressionEvaluable(String expr) {
+    return children.stream().anyMatch(x->x.isExpressionEvaluable(expr));
+  }
+
+  @Override
+  public boolean isDimAttributeEvaluable(String dim) throws LensException {
+    for (Candidate childCandidate : children) {
+      if (childCandidate.isDimAttributeEvaluable(dim)) {
+        return true;
+      }
+    }
+    return false;
   }
 
   @Override
   public Set<Integer> getAnswerableMeasurePhraseIndices() {
-    Set<Integer> mesureIndices = new HashSet<>();
+    return children.stream().map(Candidate::getAnswerableMeasurePhraseIndices)
+      .flatMap(Collection::stream).collect(toSet());
+  }
+
+  @Override
+  public boolean isPhraseAnswerable(QueriedPhraseContext phrase) throws 
LensException {
     for (Candidate cand : getChildren()) {
-      mesureIndices.addAll(cand.getAnswerableMeasurePhraseIndices());
+      if (!cand.isPhraseAnswerable(phrase)) {
+        return false;
+      }
     }
-    return mesureIndices;
+    return true;
+  }
+
+  @Override
+  public void addAnswerableMeasurePhraseIndices(int index) {
+    throw new IllegalArgumentException("Join candidates can't add answerable 
phrase indices");
+  }
+
+  @Override
+  public Optional<Date> getColumnStartTime(String column) {
+    return 
children.stream().map(x->x.getColumnStartTime(column)).filter(Optional::isPresent).map(Optional::get)
+      .max(Comparator.naturalOrder());
+  }
+
+  @Override
+  public Optional<Date> getColumnEndTime(String column) {
+    return 
children.stream().map(x->x.getColumnEndTime(column)).filter(Optional::isPresent).map(Optional::get)
+      .min(Comparator.naturalOrder());
   }
 
   @Override
   public boolean isTimeRangeCoverable(TimeRange timeRange) throws 
LensException {
-    return this.childCandidate1.isTimeRangeCoverable(timeRange)
-      && this.childCandidate2.isTimeRangeCoverable(timeRange);
+    for (Candidate candidate : getChildren()) {
+      if (!candidate.isTimeRangeCoverable(timeRange)) {
+        return false;
+      }
+    }
+    return true;
   }
 
   @Override
@@ -135,8 +169,15 @@ public class JoinCandidate implements Candidate {
     }
     return this.toStr;
   }
+  public JoinCandidate explode() throws LensException {
+    ListIterator<Candidate> i = children.listIterator();
+    while(i.hasNext()) {
+      i.set(i.next().explode());
+    }
+    return this;
+  }
 
   private String getToString() {
-    return "JOIN[" + childCandidate1.toString() + ", " + 
childCandidate2.toString() + "]";
+    return children.stream().map(Object::toString).collect(joining("; ", 
"JOIN[", "]"));
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
index 02e3dc7..41651bc 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/JoinResolver.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -41,15 +41,14 @@ import lombok.extern.slf4j.Slf4j;
 @Slf4j
 class JoinResolver implements ContextRewriter {
   private Map<AbstractCubeTable, JoinType> tableJoinTypeMap;
-  private AbstractCubeTable target;
   /**
    * Dimension as key and all the participating join chains for this dimension 
as value.
    */
-  private HashMap<Dimension, List<JoinChain>> dimensionToJoinChainsMap = new 
HashMap<Dimension, List<JoinChain>>();
+  private HashMap<Dimension, List<JoinChain>> dimensionToJoinChainsMap = new 
HashMap<>();
 
   @Override
   public void rewriteContext(CubeQueryContext cubeql) throws LensException {
-    tableJoinTypeMap = new HashMap<AbstractCubeTable, JoinType>();
+    tableJoinTypeMap = new HashMap<>();
     try {
       resolveJoins(cubeql);
     } catch (HiveException e) {
@@ -94,10 +93,7 @@ class JoinResolver implements ContextRewriter {
       dims.add(chain.getDestTable());
       for (String dim : dims) {
         Dimension dimension = cubeql.getMetastoreClient().getDimension(dim);
-        if (dimensionToJoinChainsMap.get(dimension) == null) {
-          dimensionToJoinChainsMap.put(dimension, new ArrayList<JoinChain>());
-        }
-        dimensionToJoinChainsMap.get(dimension).add(chain);
+        dimensionToJoinChainsMap.computeIfAbsent(dimension, k -> new 
ArrayList<>()).add(chain);
       }
     }
   }
@@ -105,9 +101,9 @@ class JoinResolver implements ContextRewriter {
   /**
    * Resolve joins automatically for the given query.
    *
-   * @param cubeql
-   * @throws LensException
-   * @throws HiveException
+   * @param cubeql cube query context
+   * @throws LensException lens exception
+   * @throws HiveException hive exception
    */
   private void autoResolveJoins(CubeQueryContext cubeql) throws LensException, 
HiveException {
     if (cubeql.getJoinchains().isEmpty()) {
@@ -117,6 +113,7 @@ class JoinResolver implements ContextRewriter {
     }
     processJoinChains(cubeql);
     // Find the target
+    AbstractCubeTable target;
     if (cubeql.hasCubeInQuery()) {
       // Only cube in the query
       target = (AbstractCubeTable) cubeql.getCube();
@@ -146,11 +143,8 @@ class JoinResolver implements ContextRewriter {
     for (JoinChain chain : cubeql.getJoinchains().values()) {
       Dimension dimension = 
cubeql.getMetastoreClient().getDimension(chain.getDestTable());
       Aliased<Dimension> aliasedDimension = Aliased.create(dimension, 
chain.getName());
-      if (multipleJoinPaths.get(aliasedDimension) == null) {
-        multipleJoinPaths.put(aliasedDimension, new ArrayList<JoinPath>());
-      }
-      multipleJoinPaths.get(aliasedDimension).addAll(
-        chain.getRelationEdges(cubeql.getMetastoreClient()));
+      multipleJoinPaths.computeIfAbsent(aliasedDimension, k -> new 
ArrayList<>())
+        .addAll(chain.getRelationEdges(cubeql.getMetastoreClient()));
     }
 
     boolean flattenBridgeTables = 
cubeql.getConf().getBoolean(CubeQueryConfUtil.ENABLE_FLATTENING_FOR_BRIDGETABLES,
@@ -229,9 +223,7 @@ class JoinResolver implements ContextRewriter {
 
       String[] leftChildAliases = leftTree.getLeftAliases();
       String[] leftAliases = new String[leftChildAliases.length + 1];
-      for (int i = 0; i < leftChildAliases.length; i++) {
-        leftAliases[i] = leftChildAliases[i];
-      }
+      System.arraycopy(leftChildAliases, 0, leftAliases, 0, 
leftChildAliases.length);
       leftAliases[leftChildAliases.length] = leftTree.getRightAliases()[0];
       joinTree.setLeftAliases(leftAliases);
 

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
index 5adbc23..22b1d03 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/LeastPartitionResolver.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -43,14 +43,17 @@ class LeastPartitionResolver implements ContextRewriter {
       // This seems fine, as the less number of time values actually represent 
the rollups on time. And with
       // MaxCoveringFactResolver candidates with less partitions which are not 
covering the range would be removed.
       for (Candidate candidate : cubeql.getCandidates()) {
-        factPartCount.put(candidate, 
candidate.getParticipatingPartitions().size());
+        int parts = candidate.getParticipatingPartitions().size();
+        if (parts > 0) {
+          factPartCount.put(candidate, parts);
+        }
       }
 
       double minPartitions = Collections.min(factPartCount.values());
 
       for (Iterator<Candidate> i = cubeql.getCandidates().iterator(); 
i.hasNext();) {
         Candidate candidate = i.next();
-        if (factPartCount.get(candidate) > minPartitions) {
+        if (factPartCount.containsKey(candidate) && 
factPartCount.get(candidate) > minPartitions) {
           log.info("Not considering Candidate:{} as it requires more 
partitions to be" + " queried:{} minimum:{}",
             candidate, factPartCount.get(candidate), minPartitions);
           i.remove();

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiCandidateQueryWriterContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiCandidateQueryWriterContext.java
 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiCandidateQueryWriterContext.java
new file mode 100644
index 0000000..d57c027
--- /dev/null
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/MultiCandidateQueryWriterContext.java
@@ -0,0 +1,93 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import java.util.List;
+
+import org.apache.lens.server.api.error.LensException;
+
+import com.google.common.collect.Lists;
+import lombok.Getter;
+
+/**
+ * Created on 31/03/17.
+ */
+public class MultiCandidateQueryWriterContext implements QueryWriterContext {
+  @Getter
+  private List<QueryWriterContext> children;
+  @Getter
+  private CubeQueryContext cubeQueryContext;
+
+  public MultiCandidateQueryWriterContext(List<QueryWriterContext> children, 
CubeQueryContext cubeQueryContext) {
+    this.children = children;
+    this.cubeQueryContext = cubeQueryContext;
+  }
+
+  public void addAutoJoinDims() throws LensException {
+    for (QueryWriterContext candidate : getChildren()) {
+      candidate.addAutoJoinDims();
+    }
+  }
+
+  public void addExpressionDims() throws LensException {
+    for (QueryWriterContext candidate : getChildren()) {
+      candidate.addExpressionDims();
+    }
+  }
+
+  public void addDenormDims() throws LensException {
+    for (QueryWriterContext candidate : getChildren()) {
+      candidate.addDenormDims();
+    }
+  }
+
+  public void updateDimFilterWithFactFilter() throws LensException {
+    for (QueryWriterContext candidate : getChildren()) {
+      candidate.updateDimFilterWithFactFilter();
+    }
+  }
+
+  @Override
+  public QueryAST getQueryAst() {
+    return getCubeQueryContext();
+  }
+
+  @Override
+  public void updateFromString() throws LensException {
+    for (QueryWriterContext queryWriterContext : getChildren()) {
+      queryWriterContext.updateFromString();
+    }
+  }
+  private List<StorageCandidateHQLContext> getLeafQueryWriterContexts() {
+    List<StorageCandidateHQLContext> ret = Lists.newArrayList();
+    for (QueryWriterContext queryWriterContext : getChildren()) {
+      if (queryWriterContext instanceof MultiCandidateQueryWriterContext) {
+        ret.addAll(((MultiCandidateQueryWriterContext) 
queryWriterContext).getLeafQueryWriterContexts());
+      } else {
+        ret.add((StorageCandidateHQLContext) queryWriterContext);
+      }
+    }
+    return ret;
+  }
+  @Override
+  public UnionQueryWriter toQueryWriter() throws LensException {
+    List<StorageCandidateHQLContext> leafWriterContexts = 
getLeafQueryWriterContexts();
+    return new UnionQueryWriter(leafWriterContexts, getCubeQueryContext());
+  }
+}

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
index 0996db5..e1e8e1b 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/PruneCauses.java
@@ -22,6 +22,7 @@ import static java.util.stream.Collectors.toMap;
 
 import static com.google.common.collect.Sets.newHashSet;
 
+import java.io.ByteArrayOutputStream;
 import java.util.ArrayList;
 import java.util.Comparator;
 import java.util.HashMap;
@@ -30,10 +31,12 @@ import java.util.List;
 import java.util.Map;
 
 import 
org.apache.lens.cube.parse.CandidateTablePruneCause.CandidateTablePruneCode;
+import org.apache.lens.server.api.error.LensException;
 
 import org.apache.commons.lang.StringUtils;
 
 import org.codehaus.jackson.annotate.JsonWriteNullProperties;
+import org.codehaus.jackson.map.ObjectMapper;
 
 import com.google.common.collect.Maps;
 import lombok.AllArgsConstructor;
@@ -47,7 +50,7 @@ public class PruneCauses<T> extends HashMap<T, 
List<CandidateTablePruneCause>> {
   @Getter(lazy = true)
   private final HashMap<String, List<CandidateTablePruneCause>> compact = 
computeCompact();
   @Getter(lazy = true)
-  private final CandidateTablePruneCode maxCause  = computeMaxCause();
+  private final CandidateTablePruneCode maxCause = computeMaxCause();
 
   private HashMap<String, List<CandidateTablePruneCause>> computeCompact() {
     HashMap<String, List<CandidateTablePruneCause>> detailedMessage = 
Maps.newHashMap();
@@ -61,11 +64,9 @@ public class PruneCauses<T> extends HashMap<T, 
List<CandidateTablePruneCause>> {
   @Getter(lazy = true)
   private final BriefAndDetailedError jsonObject = toJsonObject();
 
+
   public void addPruningMsg(T table, CandidateTablePruneCause msg) {
-    if (get(table) == null) {
-      put(table, new ArrayList<CandidateTablePruneCause>());
-    }
-    get(table).add(msg);
+    computeIfAbsent(table, x -> new ArrayList()).add(msg);
   }
 
   private HashMap<CandidateTablePruneCause, List<T>> reverse() {
@@ -108,6 +109,11 @@ public class PruneCauses<T> extends HashMap<T, 
List<CandidateTablePruneCause>> {
     new BriefAndDetailedError();
   }
 
  /**
   * Serializes the brief-and-detailed pruning summary for this instance to JSON.
   *
   * @return JSON representation of the accumulated pruning causes
   * @throws LensException if serialization fails
   */
  String toJsonString() throws LensException {
    return getJsonObject().toJsonString();
  }
+
+
   @JsonWriteNullProperties(false)
   @Data
   @AllArgsConstructor
@@ -121,5 +127,14 @@ public class PruneCauses<T> extends HashMap<T, 
List<CandidateTablePruneCause>> {
         o -> newHashSet(o.getKey().split(",")),
         Map.Entry::getValue));
     }
+    String toJsonString() throws LensException {
+      try(ByteArrayOutputStream out = new ByteArrayOutputStream()) {
+        ObjectMapper mapper = new ObjectMapper();
+        mapper.writeValue(out, this);
+        return out.toString("UTF-8");
+      } catch (Exception e) {
+        throw new LensException("Error writing fact pruning messages", e);
+      }
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
index 310a655..69e3e73 100644
--- 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
+++ 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueriedPhraseContext.java
@@ -20,16 +20,12 @@ package org.apache.lens.cube.parse;
 
 import java.util.*;
 
-import org.apache.lens.cube.metadata.MetastoreConstants;
-import org.apache.lens.cube.metadata.TimeRange;
 import org.apache.lens.server.api.error.LensException;
 
-import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 
 import lombok.Data;
 import lombok.EqualsAndHashCode;
-import lombok.NonNull;
 import lombok.extern.slf4j.Slf4j;
 
 @Data
@@ -101,91 +97,37 @@ class QueriedPhraseContext extends TracksQueriedColumns 
implements TrackQueriedC
   }
 
  /**
   * Checks whether every column this queried phrase needs — measures, expression
   * columns and dim-attributes — can be answered by the given storage candidate.
   *
   * @param candidate storage candidate to evaluate this phrase against
   * @return true if the candidate can evaluate all queried columns, false otherwise
   * @throws LensException on errors while resolving column evaluability
   */
  public boolean isEvaluable(StorageCandidate candidate) throws LensException {
    // all measures of the queried phrase should be present
    for (String msr : queriedMsrs) {
      if (!candidate.isColumnPresentAndValidForRange(msr)) {
        return false;
      }
    }
    // all expression columns should be evaluable
    for (String exprCol : queriedExprColumns) {
      if (!candidate.isExpressionEvaluable(exprCol)) {
        log.info("expression {} is not evaluable in fact table:{}", expr, candidate);
        return false;
      }
    }
    // all dim-attributes should be present.
    for (String col : queriedDimAttrs) {
      if (!candidate.getColumns().contains(col.toLowerCase())) {
        // check if it available as reference (denormalized from a joined table)
        if (!candidate.isDimAttributeEvaluable(col)) {
          log.info("column {} is not available in fact table:{} ", col, candidate);
          return false;
        }
      } else if (!candidate.isColumnValidForRange(col)) {
        // column exists but its declared start/end time doesn't cover the queried range
        log.info("column {} is not available in range queried in fact {}", col, candidate);
        return false;
      }
    }
    return true;
  }
-
-  private static boolean isColumnAvailableInRange(final TimeRange range, Date 
startTime, Date endTime) {
-    return (isColumnAvailableFrom(range.getFromDate(), startTime)
-        && isColumnAvailableTill(range.getToDate(), endTime));
-  }
-
-  private static boolean isColumnAvailableFrom(@NonNull final Date date, Date 
startTime) {
-    return (startTime == null) || date.equals(startTime) || 
date.after(startTime);
-  }
-
-  private static boolean isColumnAvailableTill(@NonNull final Date date, Date 
endTime) {
-    return (endTime == null) || date.equals(endTime) || date.before(endTime);
-  }
-
-  public static boolean isFactColumnValidForRange(CubeQueryContext cubeql, 
StorageCandidate sc, String col) {
-    for (TimeRange range : cubeql.getTimeRanges()) {
-      if (!isColumnAvailableInRange(range, getFactColumnStartTime(sc, col), 
getFactColumnEndTime(sc, col))) {
-        return false;
-      }
-    }
-    return true;
-  }
-
-  public static Date getFactColumnStartTime(StorageCandidate sc, String 
factCol) {
-    Date startTime = null;
-    for (String key : sc.getTable().getProperties().keySet()) {
-      if (key.contains(MetastoreConstants.FACT_COL_START_TIME_PFX)) {
-        String propCol = StringUtils.substringAfter(key, 
MetastoreConstants.FACT_COL_START_TIME_PFX);
-        if (factCol.equals(propCol)) {
-          startTime = sc.getTable().getDateFromProperty(key, false, true);
-        }
-      }
-    }
-    return startTime;
-  }
-
-  public static Date getFactColumnEndTime(StorageCandidate sc, String factCol) 
{
-    Date endTime = null;
-    for (String key : sc.getTable().getProperties().keySet()) {
-      if (key.contains(MetastoreConstants.FACT_COL_END_TIME_PFX)) {
-        String propCol = StringUtils.substringAfter(key, 
MetastoreConstants.FACT_COL_END_TIME_PFX);
-        if (factCol.equals(propCol)) {
-          endTime = sc.getTable().getDateFromProperty(key, false, true);
-        }
-      }
-    }
-    return endTime;
-  }
-
-  static boolean checkForColumnExistsAndValidForRange(StorageCandidate sc, 
String column, CubeQueryContext cubeql) {
-    return (sc.getColumns().contains(column) && 
isFactColumnValidForRange(cubeql, sc, column));
-  }
-
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
----------------------------------------------------------------------
diff --git a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
index b94f131..1ed954f 100644
--- a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryAST.java
@@ -1,4 +1,4 @@
-/**
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -18,6 +18,8 @@
  */
 package org.apache.lens.cube.parse;
 
+import org.apache.lens.cube.metadata.MetastoreUtil;
+
 import org.apache.hadoop.hive.ql.parse.ASTNode;
 
 public interface QueryAST {
@@ -87,6 +89,26 @@ public interface QueryAST {
   void setJoinAST(ASTNode node);
 
   void setFromString(String fromString);
+
   void setWhereString(String whereString);
 
  /**
   * Copies the query structure from {@code sourceAst} into this AST. The select and
   * where trees are always copied; join, group-by, having and order-by trees are copied
   * only when present in the source (they may legitimately be null). Trees are
   * duplicated via MetastoreUtil.copyAST — presumably a deep copy so later edits here
   * don't mutate the source; confirm against MetastoreUtil.
   *
   * @param sourceAst the AST whose trees and string fields are copied into this instance
   */
  default void copyFrom(QueryAST sourceAst) {
    setSelectAST(MetastoreUtil.copyAST(sourceAst.getSelectAST()));
    setWhereAST(MetastoreUtil.copyAST(sourceAst.getWhereAST()));
    if (sourceAst.getJoinAST() != null) {
      setJoinAST(MetastoreUtil.copyAST(sourceAst.getJoinAST()));
    }
    if (sourceAst.getGroupByAST() != null) {
      setGroupByAST(MetastoreUtil.copyAST(sourceAst.getGroupByAST()));
    }
    if (sourceAst.getHavingAST() != null) {
      setHavingAST(MetastoreUtil.copyAST(sourceAst.getHavingAST()));
    }
    if (sourceAst.getOrderByAST() != null) {
      setOrderByAST(MetastoreUtil.copyAST(sourceAst.getOrderByAST()));
    }
    setLimitValue(sourceAst.getLimitValue());
    setFromString(sourceAst.getFromString());
    setWhereString(sourceAst.getWhereString());
  }
 }

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryWriter.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryWriter.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryWriter.java
new file mode 100644
index 0000000..44506a3
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryWriter.java
@@ -0,0 +1,31 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import org.apache.lens.server.api.error.LensException;
+
/**
 * A writer that renders a fully-resolved cube query as an executable HQL string.
 */
public interface QueryWriter {
  /**
   * Get the HQL query.
   *
   * @return query string
   * @throws LensException if the HQL cannot be generated
   */
  String toHQL() throws LensException;
}

http://git-wip-us.apache.org/repos/asf/lens/blob/b58749e2/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryWriterContext.java
----------------------------------------------------------------------
diff --git 
a/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryWriterContext.java 
b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryWriterContext.java
new file mode 100644
index 0000000..d55de1f
--- /dev/null
+++ b/lens-cube/src/main/java/org/apache/lens/cube/parse/QueryWriterContext.java
@@ -0,0 +1,34 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.lens.cube.parse;
+
+import org.apache.lens.server.api.error.LensException;
+
/**
 * Context carried through the query-rewrite phases for one candidate (or a composite of
 * candidates); implementations accumulate the dimensions a query needs and finally
 * yield a {@link QueryWriter}.
 */
public interface QueryWriterContext {
  /** Adds dimension tables required by automatic join resolution. */
  void addAutoJoinDims() throws LensException;
  /** Adds dimension tables referenced by queried expressions. */
  void addExpressionDims() throws LensException;
  /** Adds dimension tables needed for denormalized (referenced) columns. */
  void addDenormDims() throws LensException;
  /** Pushes fact-side filters into dimension sub-queries. NOTE(review): inferred from name — confirm against implementations. */
  void updateDimFilterWithFactFilter() throws LensException;
  /** @return the AST this context writes the query from */
  QueryAST getQueryAst();
  /** Recomputes the FROM-clause string after join/dimension resolution. */
  void updateFromString() throws LensException;
  /** @return a writer that can render this context as HQL */
  QueryWriter toQueryWriter() throws LensException;
}

Reply via email to