http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java
new file mode 100644
index 0000000..7b29c8b
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java
@@ -0,0 +1,604 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.core.scan.executor.infos;
+
+import java.util.Map;
+
+import org.apache.carbondata.core.cache.dictionary.Dictionary;
+import org.apache.carbondata.core.AbsoluteTableIdentifier;
+import org.apache.carbondata.core.datastore.DataRefNode;
+import org.apache.carbondata.core.datastore.IndexKey;
+import org.apache.carbondata.core.datastore.block.AbstractIndex;
+import org.apache.carbondata.core.keygenerator.KeyGenerator;
+import org.apache.carbondata.core.scan.filter.GenericQueryType;
+import org.apache.carbondata.core.scan.filter.executer.FilterExecuter;
+import org.apache.carbondata.core.scan.model.QueryDimension;
+import org.apache.carbondata.core.scan.model.QueryMeasure;
+
+/**
+ * Below class holds all the properties needed during query execution
+ * for one block
+ */
+public class BlockExecutionInfo {
+
+  /**
+   * block on which query will be executed
+   */
+  private AbstractIndex blockIndex;
+
+  /**
+   * each segment's key size can be different, in which case the fixed key needs
+   * to be updated with the latest segment's key generator. This property tells
+   * whether that update is required; if the key size is the same it is not
+   * required
+   */
+  private boolean isFixedKeyUpdateRequired;
+
+  /**
+   * below to store all the information required for aggregation during query
+   * execution
+   */
+  private AggregatorInfo aggregatorInfo;
+
+  /**
+   * this will be used to get the first tentative block from which query
+   * execution start, this will be useful in case of filter query to get the
+   * start block based on filter values
+   */
+  private IndexKey startKey;
+
+  /**
+   * this will be used to get the last tentative block till which scanning
+   * will be done, this will be useful in case of filter query to get the last
+   * block based on filter values
+   */
+  private IndexKey endKey;
+
+  private String blockId;
+
+  /**
+   * masked byte for block which will be used to unpack the fixed length key,
+   * this will be used for updating the older block key with new block key
+   * generator
+   */
+  private int[] maskedByteForBlock;
+
+  /**
+   * total number of dimension in block
+   */
+  private int totalNumberDimensionBlock;
+
+  /**
+   * total number of measure in block
+   */
+  private int totalNumberOfMeasureBlock;
+
+  /**
+   * will be used to read the dimension block from file
+   */
+  private int[][] allSelectedDimensionBlocksIndexes;
+
+  /**
+   * will be used to read the measure block from file
+   */
+  private int[][] allSelectedMeasureBlocksIndexes;
+
+  /**
+   * this will be used to update the older block fixed length keys with the
+   * new block fixed length key
+   */
+  private KeyStructureInfo keyStructureInfo;
+
+  /**
+   * first block from which query execution will start
+   */
+  private DataRefNode firstDataBlock;
+
+  /**
+   * number of block to be scanned in the query
+   */
+  private long numberOfBlockToScan;
+
+  /**
+   * key size of the fixed length dimension column
+   */
+  private int fixedLengthKeySize;
+
+  /**
+   * dictionary column block indexes based on query
+   */
+  private int[] dictionaryColumnBlockIndex;
+  /**
+   * no dictionary column block indexes based on the query order
+   */
+  private int[] noDictionaryBlockIndexes;
+
+  /**
+   * key generator used for generating the table block fixed length key
+   */
+  private KeyGenerator blockKeyGenerator;
+
+  /**
+   * each column value size
+   */
+  private int[] eachColumnValueSize;
+
+  /**
+   * column group block index in file to key structure info mapping
+   */
+  private Map<Integer, KeyStructureInfo> columnGroupToKeyStructureInfo;
+
+  /**
+   * mapping of dictionary dimension column id to its Dictionary, which will be
+   * used to get the actual data from the dictionary for aggregation and sorting
+   */
+  private Map<String, Dictionary> columnIdToDcitionaryMapping;
+
+  /**
+   * filter tree to execute the filter
+   */
+  private FilterExecuter filterExecuterTree;
+
+  /**
+   * whether it needs only raw byte records without aggregation.
+   */
+  private boolean isRawRecordDetailQuery;
+
+  /**
+   * start index of blocklets
+   */
+  private int startBlockletIndex;
+
+  /**
+   * number of blocklet to be scanned
+   */
+  private int numberOfBlockletToScan;
+
+  /**
+   * complexParentIndexToQueryMap
+   */
+  private Map<Integer, GenericQueryType> complexParentIndexToQueryMap;
+
+  /**
+   * complex dimension parent block indexes;
+   */
+  private int[] complexColumnParentBlockIndexes;
+
+  /**
+   * @return the tableBlock
+   */
+  public AbstractIndex getDataBlock() {
+    return blockIndex;
+  }
+
+  /**
+   * list of dimensions selected in the query
+   */
+  private QueryDimension[] queryDimensions;
+
+  /**
+   * list of measures selected in the query
+   */
+  private QueryMeasure[] queryMeasures;
+
+  /**
+   * whether it needs to read data in vector/columnar format.
+   */
+  private boolean vectorBatchCollector;
+
+  /**
+   * absolute table identifier
+   */
+  private AbsoluteTableIdentifier absoluteTableIdentifier;
+
+  public AbsoluteTableIdentifier getAbsoluteTableIdentifier() {
+    return absoluteTableIdentifier;
+  }
+
+  public void setAbsoluteTableIdentifier(AbsoluteTableIdentifier absoluteTableIdentifier) {
+    this.absoluteTableIdentifier = absoluteTableIdentifier;
+  }
+
+  /**
+   * @param blockIndex the tableBlock to set
+   */
+  public void setDataBlock(AbstractIndex blockIndex) {
+    this.blockIndex = blockIndex;
+  }
+
+  /**
+   * @return the isFixedKeyUpdateRequired
+   */
+  public boolean isFixedKeyUpdateRequired() {
+    return isFixedKeyUpdateRequired;
+  }
+
+  /**
+   * @param isFixedKeyUpdateRequired the isFixedKeyUpdateRequired to set
+   */
+  public void setFixedKeyUpdateRequired(boolean isFixedKeyUpdateRequired) {
+    this.isFixedKeyUpdateRequired = isFixedKeyUpdateRequired;
+  }
+
+  /**
+   * @return the aggregatorInfos
+   */
+  public AggregatorInfo getAggregatorInfo() {
+    return aggregatorInfo;
+  }
+
+  /**
+   * @param aggregatorInfo the aggregatorInfos to set
+   */
+  public void setAggregatorInfo(AggregatorInfo aggregatorInfo) {
+    this.aggregatorInfo = aggregatorInfo;
+  }
+
+  /**
+   * @return the startKey
+   */
+  public IndexKey getStartKey() {
+    return startKey;
+  }
+
+  /**
+   * @param startKey the startKey to set
+   */
+  public void setStartKey(IndexKey startKey) {
+    this.startKey = startKey;
+  }
+
+  /**
+   * @return the endKey
+   */
+  public IndexKey getEndKey() {
+    return endKey;
+  }
+
+  /**
+   * @param endKey the endKey to set
+   */
+  public void setEndKey(IndexKey endKey) {
+    this.endKey = endKey;
+  }
+
+  /**
+   * @return the maskedByteForBlock
+   */
+  public int[] getMaskedByteForBlock() {
+    return maskedByteForBlock;
+  }
+
+  /**
+   * @param maskedByteForBlock the maskedByteForBlock to set
+   */
+  public void setMaskedByteForBlock(int[] maskedByteForBlock) {
+    this.maskedByteForBlock = maskedByteForBlock;
+  }
+
+  /**
+   * @return the totalNumberDimensionBlock
+   */
+  public int getTotalNumberDimensionBlock() {
+    return totalNumberDimensionBlock;
+  }
+
+  /**
+   * @param totalNumberDimensionBlock the totalNumberDimensionBlock to set
+   */
+  public void setTotalNumberDimensionBlock(int totalNumberDimensionBlock) {
+    this.totalNumberDimensionBlock = totalNumberDimensionBlock;
+  }
+
+  /**
+   * @return the totalNumberOfMeasureBlock
+   */
+  public int getTotalNumberOfMeasureBlock() {
+    return totalNumberOfMeasureBlock;
+  }
+
+  /**
+   * @param totalNumberOfMeasureBlock the totalNumberOfMeasureBlock to set
+   */
+  public void setTotalNumberOfMeasureBlock(int totalNumberOfMeasureBlock) {
+    this.totalNumberOfMeasureBlock = totalNumberOfMeasureBlock;
+  }
+
+  /**
+   * @return the allSelectedDimensionBlocksIndexes
+   */
+  public int[][] getAllSelectedDimensionBlocksIndexes() {
+    return allSelectedDimensionBlocksIndexes;
+  }
+
+  /**
+   * @param allSelectedDimensionBlocksIndexes the allSelectedDimensionBlocksIndexes to set
+   */
+  public void setAllSelectedDimensionBlocksIndexes(int[][] allSelectedDimensionBlocksIndexes) {
+    this.allSelectedDimensionBlocksIndexes = allSelectedDimensionBlocksIndexes;
+  }
+
+  /**
+   * @return the allSelectedMeasureBlocksIndexes
+   */
+  public int[][] getAllSelectedMeasureBlocksIndexes() {
+    return allSelectedMeasureBlocksIndexes;
+  }
+
+  /**
+   * @param allSelectedMeasureBlocksIndexes the allSelectedMeasureBlocksIndexes to set
+   */
+  public void setAllSelectedMeasureBlocksIndexes(int[][] allSelectedMeasureBlocksIndexes) {
+    this.allSelectedMeasureBlocksIndexes = allSelectedMeasureBlocksIndexes;
+  }
+
+  /**
+   * @return the restructureInfos
+   */
+  public KeyStructureInfo getKeyStructureInfo() {
+    return keyStructureInfo;
+  }
+
+  /**
+   * @param keyStructureInfo the restructureInfos to set
+   */
+  public void setKeyStructureInfo(KeyStructureInfo keyStructureInfo) {
+    this.keyStructureInfo = keyStructureInfo;
+  }
+
+  /**
+   * @return the firstDataBlock
+   */
+  public DataRefNode getFirstDataBlock() {
+    return firstDataBlock;
+  }
+
+  /**
+   * @param firstDataBlock the firstDataBlock to set
+   */
+  public void setFirstDataBlock(DataRefNode firstDataBlock) {
+    this.firstDataBlock = firstDataBlock;
+  }
+
+  /**
+   * @return the numberOfBlockToScan
+   */
+  public long getNumberOfBlockToScan() {
+    return numberOfBlockToScan;
+  }
+
+  /**
+   * @param numberOfBlockToScan the numberOfBlockToScan to set
+   */
+  public void setNumberOfBlockToScan(long numberOfBlockToScan) {
+    this.numberOfBlockToScan = numberOfBlockToScan;
+  }
+
+  /**
+   * @return the fixedLengthKeySize
+   */
+  public int getFixedLengthKeySize() {
+    return fixedLengthKeySize;
+  }
+
+  /**
+   * @param fixedLengthKeySize the fixedLengthKeySize to set
+   */
+  public void setFixedLengthKeySize(int fixedLengthKeySize) {
+    this.fixedLengthKeySize = fixedLengthKeySize;
+  }
+
+  /**
+   * @return the filterEvaluatorTree
+   */
+  public FilterExecuter getFilterExecuterTree() {
+    return filterExecuterTree;
+  }
+
+  /**
+   * @param filterExecuterTree the filterEvaluatorTree to set
+   */
+  public void setFilterExecuterTree(FilterExecuter filterExecuterTree) {
+    this.filterExecuterTree = filterExecuterTree;
+  }
+
+  /**
+   * @return the tableBlockKeyGenerator
+   */
+  public KeyGenerator getBlockKeyGenerator() {
+    return blockKeyGenerator;
+  }
+
+  /**
+   * @param tableBlockKeyGenerator the tableBlockKeyGenerator to set
+   */
+  public void setBlockKeyGenerator(KeyGenerator tableBlockKeyGenerator) {
+    this.blockKeyGenerator = tableBlockKeyGenerator;
+  }
+
+  /**
+   * @return the eachColumnValueSize
+   */
+  public int[] getEachColumnValueSize() {
+    return eachColumnValueSize;
+  }
+
+  /**
+   * @param eachColumnValueSize the eachColumnValueSize to set
+   */
+  public void setEachColumnValueSize(int[] eachColumnValueSize) {
+    this.eachColumnValueSize = eachColumnValueSize;
+  }
+
+  /**
+   * @return the dictionaryColumnBlockIndex
+   */
+  public int[] getDictionaryColumnBlockIndex() {
+    return dictionaryColumnBlockIndex;
+  }
+
+  /**
+   * @param dictionaryColumnBlockIndex the dictionaryColumnBlockIndex to set
+   */
+  public void setDictionaryColumnBlockIndex(int[] dictionaryColumnBlockIndex) {
+    this.dictionaryColumnBlockIndex = dictionaryColumnBlockIndex;
+  }
+
+  /**
+   * @return the noDictionaryBlockIndexes
+   */
+  public int[] getNoDictionaryBlockIndexes() {
+    return noDictionaryBlockIndexes;
+  }
+
+  /**
+   * @param noDictionaryBlockIndexes the noDictionaryBlockIndexes to set
+   */
+  public void setNoDictionaryBlockIndexes(int[] noDictionaryBlockIndexes) {
+    this.noDictionaryBlockIndexes = noDictionaryBlockIndexes;
+  }
+
+  /**
+   * @return the columnGroupToKeyStructureInfo
+   */
+  public Map<Integer, KeyStructureInfo> getColumnGroupToKeyStructureInfo() {
+    return columnGroupToKeyStructureInfo;
+  }
+
+  /**
+   * @param columnGroupToKeyStructureInfo the columnGroupToKeyStructureInfo to set
+   */
+  public void setColumnGroupToKeyStructureInfo(
+      Map<Integer, KeyStructureInfo> columnGroupToKeyStructureInfo) {
+    this.columnGroupToKeyStructureInfo = columnGroupToKeyStructureInfo;
+  }
+
+  /**
+   * @param columnIdToDcitionaryMapping the columnIdToDcitionaryMapping to set
+   */
+  public void setColumnIdToDcitionaryMapping(Map<String, Dictionary> columnIdToDcitionaryMapping) {
+    this.columnIdToDcitionaryMapping = columnIdToDcitionaryMapping;
+  }
+
+  public boolean isRawRecordDetailQuery() {
+    return isRawRecordDetailQuery;
+  }
+
+  public void setRawRecordDetailQuery(boolean rawRecordDetailQuery) {
+    isRawRecordDetailQuery = rawRecordDetailQuery;
+  }
+
+  /**
+   * @return the complexParentIndexToQueryMap
+   */
+  public Map<Integer, GenericQueryType> getComlexDimensionInfoMap() {
+    return complexParentIndexToQueryMap;
+  }
+
+  /**
+   * @param complexDimensionInfoMap the complexParentIndexToQueryMap to set
+   */
+  public void setComplexDimensionInfoMap(Map<Integer, GenericQueryType> complexDimensionInfoMap) {
+    this.complexParentIndexToQueryMap = complexDimensionInfoMap;
+  }
+
+  /**
+   * @return the complexColumnParentBlockIndexes
+   */
+  public int[] getComplexColumnParentBlockIndexes() {
+    return complexColumnParentBlockIndexes;
+  }
+
+  /**
+   * @param complexColumnParentBlockIndexes the complexColumnParentBlockIndexes to set
+   */
+  public void setComplexColumnParentBlockIndexes(int[] complexColumnParentBlockIndexes) {
+    this.complexColumnParentBlockIndexes = complexColumnParentBlockIndexes;
+  }
+
+  public QueryDimension[] getQueryDimensions() {
+    return queryDimensions;
+  }
+
+  public void setQueryDimensions(QueryDimension[] queryDimensions) {
+    this.queryDimensions = queryDimensions;
+  }
+
+  public QueryMeasure[] getQueryMeasures() {
+    return queryMeasures;
+  }
+
+  public void setQueryMeasures(QueryMeasure[] queryMeasures) {
+    this.queryMeasures = queryMeasures;
+  }
+
+  /**
+   * The method to set the number of blocklets to be scanned
+   *
+   * @param numberOfBlockletToScan
+   */
+  public void setNumberOfBlockletToScan(int numberOfBlockletToScan) {
+    this.numberOfBlockletToScan = numberOfBlockletToScan;
+  }
+
+  /**
+   * get the number of blocklets to be scanned
+   *
+   * @return
+   */
+  public int getNumberOfBlockletToScan() {
+    return numberOfBlockletToScan;
+  }
+
+  /**
+   * returns the start blocklet index to be scanned
+   *
+   * @return
+   */
+  public int getStartBlockletIndex() {
+    return startBlockletIndex;
+  }
+
+  /**
+   * set the blocklet index to be scanned
+   *
+   * @param startBlockletIndex
+   */
+  public void setStartBlockletIndex(int startBlockletIndex) {
+    this.startBlockletIndex = startBlockletIndex;
+  }
+
+  public boolean isVectorBatchCollector() {
+    return vectorBatchCollector;
+  }
+
+  public void setVectorBatchCollector(boolean vectorBatchCollector) {
+    this.vectorBatchCollector = vectorBatchCollector;
+  }
+
+  public String getBlockId() {
+    return blockId;
+  }
+
+  public void setBlockId(String blockId) {
+    this.blockId = blockId;
+  }
+}
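
For reference, a minimal usage sketch (illustrative only, not part of this patch): it
assumes the block index, the start and end index keys, and the projection arrays have
already been prepared by the executor's block-loading and filter-resolution steps. The
example class and helper method below are hypothetical; only the setters shown in the
diff above are used.

import org.apache.carbondata.core.datastore.DataRefNode;
import org.apache.carbondata.core.datastore.IndexKey;
import org.apache.carbondata.core.datastore.block.AbstractIndex;
import org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo;
import org.apache.carbondata.core.scan.model.QueryDimension;
import org.apache.carbondata.core.scan.model.QueryMeasure;

public final class BlockExecutionInfoExample {

  // hypothetical helper: populate the per-block execution properties for one scan
  public static BlockExecutionInfo buildFor(AbstractIndex blockIndex, DataRefNode firstBlock,
      IndexKey startKey, IndexKey endKey, QueryDimension[] projectionDimensions,
      QueryMeasure[] projectionMeasures) {
    BlockExecutionInfo info = new BlockExecutionInfo();
    info.setDataBlock(blockIndex);          // block on which the query will be executed
    info.setFirstDataBlock(firstBlock);     // first block from which execution starts
    info.setStartKey(startKey);             // tentative start key (from filter values)
    info.setEndKey(endKey);                 // tentative end key (from filter values)
    info.setQueryDimensions(projectionDimensions);
    info.setQueryMeasures(projectionMeasures);
    info.setVectorBatchCollector(true);     // read data in vector/columnar batches
    info.setRawRecordDetailQuery(false);    // do not return only raw byte records
    return info;
  }
}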

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/KeyStructureInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/KeyStructureInfo.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/KeyStructureInfo.java
new file mode 100644
index 0000000..ad45eb8
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/KeyStructureInfo.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.core.scan.executor.infos;
+
+import org.apache.carbondata.core.keygenerator.KeyGenerator;
+
+/**
+ * Below class will store the structure of the key
+ * used during query execution
+ */
+public class KeyStructureInfo {
+
+  /**
+   * key generator of the latest table block; this key generator will be used
+   * to update the mdkey of the older slice with the new slice
+   */
+  private KeyGenerator keyGenerator;
+
+  /**
+   * mask bytes ranges for the query
+   */
+  private int[] maskByteRanges;
+
+  /**
+   * max key for query execution
+   */
+  private byte[] maxKey;
+
+  /**
+   * dimension ordinals inside the column group
+   */
+  private int[] mdkeyQueryDimensionOrdinal;
+
+  /**
+   * @return the keyGenerator
+   */
+  public KeyGenerator getKeyGenerator() {
+    return keyGenerator;
+  }
+
+  /**
+   * @param keyGenerator the keyGenerator to set
+   */
+  public void setKeyGenerator(KeyGenerator keyGenerator) {
+    this.keyGenerator = keyGenerator;
+  }
+
+  /**
+   * @return the maskByteRanges
+   */
+  public int[] getMaskByteRanges() {
+    return maskByteRanges;
+  }
+
+  /**
+   * @param maskByteRanges the maskByteRanges to set
+   */
+  public void setMaskByteRanges(int[] maskByteRanges) {
+    this.maskByteRanges = maskByteRanges;
+  }
+
+  /**
+   * @return the maxKey
+   */
+  public byte[] getMaxKey() {
+    return maxKey;
+  }
+
+  /**
+   * @param maxKey the maxKey to set
+   */
+  public void setMaxKey(byte[] maxKey) {
+    this.maxKey = maxKey;
+  }
+
+  public int[] getMdkeyQueryDimensionOrdinal() {
+    return mdkeyQueryDimensionOrdinal;
+  }
+
+  public void setMdkeyQueryDimensionOrdinal(int[] mdkeyQueryDimensionOrdinal) {
+    this.mdkeyQueryDimensionOrdinal = mdkeyQueryDimensionOrdinal;
+  }
+}
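
For reference, a minimal sketch (illustrative only, not part of this patch) of how a
KeyStructureInfo is typically assembled for a column group, mirroring
QueryUtil.getColumnGroupKeyStructureInfo() introduced later in this commit. The column
group key generator and the mdkey ordinals of the queried columns are assumed to be
obtained from SegmentProperties; the example class and method below are hypothetical.

import java.util.List;

import org.apache.carbondata.core.keygenerator.KeyGenException;
import org.apache.carbondata.core.keygenerator.KeyGenerator;
import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
import org.apache.carbondata.core.scan.executor.util.QueryUtil;

public final class KeyStructureInfoExample {

  public static KeyStructureInfo buildFor(KeyGenerator columnGroupKeyGenerator,
      List<Integer> mdKeyOrdinals) throws KeyGenException {
    // byte positions of the queried ordinals inside the packed mdkey
    int[] maskByteRanges =
        QueryUtil.getMaskedByteRangeBasedOrdinal(mdKeyOrdinals, columnGroupKeyGenerator);
    // max key: all bits set for the queried ordinals, zero bits for the rest
    byte[] maxKey = QueryUtil.getMaxKeyBasedOnOrinal(mdKeyOrdinals, columnGroupKeyGenerator);
    KeyStructureInfo info = new KeyStructureInfo();
    info.setKeyGenerator(columnGroupKeyGenerator);
    info.setMaskByteRanges(maskByteRanges);
    info.setMaxKey(maxKey);
    return info;
  }
}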

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java
new file mode 100644
index 0000000..676d978
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/QueryUtil.java
@@ -0,0 +1,882 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.core.scan.executor.util;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.TreeSet;
+
+import org.apache.carbondata.core.cache.Cache;
+import org.apache.carbondata.core.cache.CacheProvider;
+import org.apache.carbondata.core.cache.CacheType;
+import org.apache.carbondata.core.cache.dictionary.Dictionary;
+import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
+import org.apache.carbondata.core.AbsoluteTableIdentifier;
+import org.apache.carbondata.core.CarbonTableIdentifier;
+import org.apache.carbondata.core.datastore.block.SegmentProperties;
+import org.apache.carbondata.core.metadata.CarbonMetadata;
+import org.apache.carbondata.core.metadata.DataType;
+import org.apache.carbondata.core.metadata.Encoding;
+import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.keygenerator.KeyGenException;
+import org.apache.carbondata.core.keygenerator.KeyGenerator;
+import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.scan.complextypes.ArrayQueryType;
+import org.apache.carbondata.core.scan.complextypes.PrimitiveQueryType;
+import org.apache.carbondata.core.scan.complextypes.StructQueryType;
+import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
+import org.apache.carbondata.core.scan.expression.ColumnExpression;
+import org.apache.carbondata.core.scan.expression.Expression;
+import org.apache.carbondata.core.scan.filter.GenericQueryType;
+import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
+import org.apache.carbondata.core.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
+import org.apache.carbondata.core.scan.model.QueryDimension;
+import org.apache.carbondata.core.scan.model.QueryMeasure;
+import org.apache.carbondata.core.scan.model.QueryModel;
+
+import org.apache.commons.lang3.ArrayUtils;
+
+/**
+ * Utility class for query execution
+ */
+public class QueryUtil {
+
+  /**
+   * Below method will be used to get the masked byte range based on the query
+   * dimension. It will give the range in the mdkey. This will be used to get
+   * the actual key array from masked mdkey
+   *
+   * @param queryDimensions query dimension selected in query
+   * @param keyGenerator    key generator
+   * @return masked key
+   */
+  public static int[] getMaskedByteRange(List<QueryDimension> queryDimensions,
+      KeyGenerator keyGenerator) {
+    Set<Integer> byteRangeSet = new TreeSet<Integer>();
+    int[] byteRange = null;
+    for (int i = 0; i < queryDimensions.size(); i++) {
+
+      // no dictionary columns and complex type columns are not
+      // part of the mdkey, so skip those dimensions while
+      // calculating the range
+      if (queryDimensions.get(i).getDimension().getKeyOrdinal() == -1) {
+        continue;
+      }
+      // get the offset of the dimension in the mdkey
+      byteRange =
+          keyGenerator.getKeyByteOffsets(queryDimensions.get(i).getDimension().getKeyOrdinal());
+      for (int j = byteRange[0]; j <= byteRange[1]; j++) {
+        byteRangeSet.add(j);
+      }
+    }
+    int[] maskedByteRange = new int[byteRangeSet.size()];
+    int index = 0;
+    Iterator<Integer> iterator = byteRangeSet.iterator();
+    // add the masked byte range
+    while (iterator.hasNext()) {
+      maskedByteRange[index++] = iterator.next();
+    }
+    return maskedByteRange;
+  }
+
+  public static int[] getMaskedByteRangeBasedOrdinal(List<Integer> ordinals,
+      KeyGenerator keyGenerator) {
+    Set<Integer> byteRangeSet = new TreeSet<Integer>();
+    int[] byteRange = null;
+    for (int i = 0; i < ordinals.size(); i++) {
+
+      // get the offset of the dimension in the mdkey
+      byteRange = keyGenerator.getKeyByteOffsets(ordinals.get(i));
+      for (int j = byteRange[0]; j <= byteRange[1]; j++) {
+        byteRangeSet.add(j);
+      }
+    }
+    int[] maskedByteRange = new int[byteRangeSet.size()];
+    int index = 0;
+    Iterator<Integer> iterator = byteRangeSet.iterator();
+    // add the masked byte range
+    while (iterator.hasNext()) {
+      maskedByteRange[index++] = iterator.next();
+    }
+    return maskedByteRange;
+  }
+
+  /**
+   * Below method will return the max key based on the dimension ordinal
+   *
+   * @param keyOrdinalList
+   * @param generator
+   * @return
+   * @throws KeyGenException
+   */
+  public static byte[] getMaxKeyBasedOnOrinal(List<Integer> keyOrdinalList, KeyGenerator generator)
+      throws KeyGenException {
+    long[] max = new long[generator.getDimCount()];
+    Arrays.fill(max, 0L);
+
+    for (int i = 0; i < keyOrdinalList.size(); i++) {
+      // adding for dimension which is selected in query
+      max[keyOrdinalList.get(i)] = Long.MAX_VALUE;
+    }
+    return generator.generateKey(max);
+  }
+
+  /**
+   * To get the max key based on dimensions: all other dimensions will be
+   * set to 0 bits and the required query dimensions will be masked with
+   * Long.MAX_VALUE so that we can mask the key and then compare while
+   * aggregating. This can be useful during a filter query when only a few
+   * dimensions are selected out of a row group
+   *
+   * @param queryDimensions dimension selected in query
+   * @param generator       key generator
+   * @return max key for dimension
+   * @throws KeyGenException if any problem while generating the key
+   */
+  public static byte[] getMaxKeyBasedOnDimensions(List<QueryDimension> queryDimensions,
+      KeyGenerator generator) throws KeyGenException {
+    long[] max = new long[generator.getDimCount()];
+    Arrays.fill(max, 0L);
+
+    for (int i = 0; i < queryDimensions.size(); i++) {
+      // no dictionary columns and complex type columns are not
+      // part of the mdkey, so skip those dimensions while
+      // calculating the range
+      if (queryDimensions.get(i).getDimension().getKeyOrdinal() == -1) {
+        continue;
+      }
+      // adding for dimension which is selected in query
+      max[queryDimensions.get(i).getDimension().getKeyOrdinal()] = Long.MAX_VALUE;
+    }
+
+    return generator.generateKey(max);
+  }
+
+  /**
+   * Below method will be used to get the masked key for query
+   *
+   * @param keySize         size of the masked key
+   * @param maskedKeyRanges masked byte range
+   * @return masked bytes
+   */
+  public static int[] getMaskedByte(int keySize, int[] maskedKeyRanges) {
+    int[] maskedKey = new int[keySize];
+    // all the non selected dimension will be filled with -1
+    Arrays.fill(maskedKey, -1);
+    for (int i = 0; i < maskedKeyRanges.length; i++) {
+      maskedKey[maskedKeyRanges[i]] = i;
+    }
+    return maskedKey;
+  }
+
+  /**
+   * Below method will be used to get the dimension block index in file based
+   * on query dimension
+   *
+   * @param queryDimensions                query dimension
+   * @param dimensionOrdinalToBlockMapping mapping of dimension block in file to query dimension
+   * @return block index of file
+   */
+  public static int[] getDimensionsBlockIndexes(List<QueryDimension> queryDimensions,
+      Map<Integer, Integer> dimensionOrdinalToBlockMapping,
+      List<CarbonDimension> customAggregationDimension, Set<CarbonDimension> filterDimensions,
+      List<Integer> allProjectionListDimensionIndexes) {
+    // using set as in row group columns will point to same block
+    Set<Integer> dimensionBlockIndex = new HashSet<Integer>();
+    Set<Integer> filterDimensionOrdinal = getFilterDimensionOrdinal(filterDimensions);
+    int blockIndex = 0;
+    for (int i = 0; i < queryDimensions.size(); i++) {
+      if (queryDimensions.get(i).getDimension().hasEncoding(Encoding.IMPLICIT)) {
+        continue;
+      }
+      allProjectionListDimensionIndexes.add(queryDimensions.get(i).getDimension().getOrdinal());
+
+      if (!filterDimensionOrdinal.contains(queryDimensions.get(i).getDimension().getOrdinal())) {
+        blockIndex =
+            dimensionOrdinalToBlockMapping.get(queryDimensions.get(i).getDimension().getOrdinal());
+        dimensionBlockIndex.add(blockIndex);
+        if (queryDimensions.get(i).getDimension().numberOfChild() > 0) {
+          addChildrenBlockIndex(dimensionBlockIndex, queryDimensions.get(i).getDimension());
+        }
+      }
+    }
+    for (int i = 0; i < customAggregationDimension.size(); i++) {
+      blockIndex =
+          dimensionOrdinalToBlockMapping.get(customAggregationDimension.get(i).getOrdinal());
+      // not adding the children dimension as dimension aggregation
+      // is not push down in case of complex dimension
+      dimensionBlockIndex.add(blockIndex);
+    }
+    int[] dimensionIndex = ArrayUtils
+        .toPrimitive(dimensionBlockIndex.toArray(new Integer[dimensionBlockIndex.size()]));
+    Arrays.sort(dimensionIndex);
+    return dimensionIndex;
+  }
+
+  /**
+   * Below method will be used to add the children block index
+   * this will be basically for complex dimension which will have children
+   *
+   * @param blockIndexes block indexes
+   * @param dimension    parent dimension
+   */
+  private static void addChildrenBlockIndex(Set<Integer> blockIndexes, CarbonDimension dimension) {
+    for (int i = 0; i < dimension.numberOfChild(); i++) {
+      addChildrenBlockIndex(blockIndexes, dimension.getListOfChildDimensions().get(i));
+      blockIndexes.add(dimension.getListOfChildDimensions().get(i).getOrdinal());
+    }
+    }
+  }
+
+  /**
+   * Below method will be used to get the dictionary mapping for all the
+   * dictionary encoded dimensions present in the query
+   *
+   * @param queryDimensions         query dimensions present in the query; these will be used to
+   *                                convert the result from surrogate key to actual data
+   * @param absoluteTableIdentifier absolute table identifier
+   * @return dimension unique id to its dictionary map
+   * @throws IOException
+   */
+  public static Map<String, Dictionary> getDimensionDictionaryDetail(
+      List<QueryDimension> queryDimensions, Set<CarbonDimension> filterComplexDimensions,
+      AbsoluteTableIdentifier absoluteTableIdentifier) throws IOException {
+    // to store the dimension unique column id list; this is required as a
+    // dimension can be present as a query dimension and also have an
+    // aggregation function applied on it, and we need only one instance
+    // of its dictionary.
+    // direct dictionary skip is done only for the dictionary lookup
+    Set<String> dictionaryDimensionFromQuery = new HashSet<String>();
+    for (int i = 0; i < queryDimensions.size(); i++) {
+      List<Encoding> encodingList = queryDimensions.get(i).getDimension().getEncoder();
+      // TODO need to remove the data type check for parent column in complex type;
+      // no need to write encoding dictionary
+      if (CarbonUtil.hasEncoding(encodingList, Encoding.DICTIONARY) && !CarbonUtil
+          .hasEncoding(encodingList, Encoding.DIRECT_DICTIONARY) && !CarbonUtil
+          .hasEncoding(encodingList, Encoding.IMPLICIT)) {
+
+        if (queryDimensions.get(i).getDimension().numberOfChild() == 0) {
+          dictionaryDimensionFromQuery.add(queryDimensions.get(i).getDimension().getColumnId());
+        }
+        if (queryDimensions.get(i).getDimension().numberOfChild() > 0) {
+          getChildDimensionDictionaryDetail(queryDimensions.get(i).getDimension(),
+              dictionaryDimensionFromQuery);
+        }
+      }
+    }
+    Iterator<CarbonDimension> iterator = filterComplexDimensions.iterator();
+    while (iterator.hasNext()) {
+      getChildDimensionDictionaryDetail(iterator.next(), dictionaryDimensionFromQuery);
+    }
+    // converting to a list as the exposed cache API expects a list,
+    // which is arguably not ideal
+    List<String> dictionaryColumnIdList =
+        new ArrayList<String>(dictionaryDimensionFromQuery.size());
+    dictionaryColumnIdList.addAll(dictionaryDimensionFromQuery);
+    return getDictionaryMap(dictionaryColumnIdList, absoluteTableIdentifier);
+  }
+
+  /**
+   * Below method will be used to fill the children dimension column id
+   *
+   * @param queryDimensions              query dimension
+   * @param dictionaryDimensionFromQuery dictionary dimension for query
+   */
+  private static void getChildDimensionDictionaryDetail(CarbonDimension queryDimensions,
+      Set<String> dictionaryDimensionFromQuery) {
+    for (int j = 0; j < queryDimensions.numberOfChild(); j++) {
+      List<Encoding> encodingList = queryDimensions.getListOfChildDimensions().get(j).getEncoder();
+      if (queryDimensions.getListOfChildDimensions().get(j).numberOfChild() > 0) {
+        getChildDimensionDictionaryDetail(queryDimensions.getListOfChildDimensions().get(j),
+            dictionaryDimensionFromQuery);
+      } else if (!CarbonUtil.hasEncoding(encodingList, Encoding.DIRECT_DICTIONARY)) {
+        dictionaryDimensionFromQuery
+            .add(queryDimensions.getListOfChildDimensions().get(j).getColumnId());
+      }
+    }
+  }
+
+  /**
+   * Below method will be used to get the column id to its dictionary mapping
+   *
+   * @param dictionaryColumnIdList  dictionary column list
+   * @param absoluteTableIdentifier absolute table identifier
+   * @return dictionary mapping
+   * @throws IOException
+   */
+  private static Map<String, Dictionary> getDictionaryMap(List<String> dictionaryColumnIdList,
+      AbsoluteTableIdentifier absoluteTableIdentifier) throws IOException {
+    // this for dictionary unique identifier
+    List<DictionaryColumnUniqueIdentifier> dictionaryColumnUniqueIdentifiers =
+        getDictionaryColumnUniqueIdentifierList(dictionaryColumnIdList,
+            absoluteTableIdentifier.getCarbonTableIdentifier());
+    CacheProvider cacheProvider = CacheProvider.getInstance();
+    Cache<DictionaryColumnUniqueIdentifier, Dictionary> forwardDictionaryCache = cacheProvider
+        .createCache(CacheType.FORWARD_DICTIONARY, absoluteTableIdentifier.getStorePath());
+
+    List<Dictionary> columnDictionaryList =
+        forwardDictionaryCache.getAll(dictionaryColumnUniqueIdentifiers);
+    Map<String, Dictionary> columnDictionaryMap = new HashMap<>(columnDictionaryList.size());
+    for (int i = 0; i < dictionaryColumnUniqueIdentifiers.size(); i++) {
+      // TODO: null check for column dictionary, if cache size is less it
+      // might return null here, in that case throw exception
+      columnDictionaryMap.put(dictionaryColumnIdList.get(i), columnDictionaryList.get(i));
+    }
+    return columnDictionaryMap;
+  }
+
+  /**
+   * Below method will be used to get the dictionary column unique identifier
+   *
+   * @param dictionaryColumnIdList dictionary
+   * @param carbonTableIdentifier
+   * @return
+   */
+  private static List<DictionaryColumnUniqueIdentifier> getDictionaryColumnUniqueIdentifierList(
+      List<String> dictionaryColumnIdList, CarbonTableIdentifier carbonTableIdentifier) {
+    CarbonTable carbonTable =
+        CarbonMetadata.getInstance().getCarbonTable(carbonTableIdentifier.getTableUniqueName());
+    List<DictionaryColumnUniqueIdentifier> dictionaryColumnUniqueIdentifiers =
+        new ArrayList<>(dictionaryColumnIdList.size());
+    for (String columnId : dictionaryColumnIdList) {
+      CarbonDimension dimension = CarbonMetadata.getInstance()
+          .getCarbonDimensionBasedOnColIdentifier(carbonTable, columnId);
+      if (dimension != null) {
+        dictionaryColumnUniqueIdentifiers.add(
+            new DictionaryColumnUniqueIdentifier(
+                carbonTableIdentifier,
+                dimension.getColumnIdentifier(),
+                dimension.getDataType()
+            )
+        );
+      }
+    }
+    return dictionaryColumnUniqueIdentifiers;
+  }
+
+  /**
+   * Below method will be used to get the measure block indexes to be read
+   * from the file
+   *
+   * @param queryMeasures              query measures
+   * @param expressionMeasure          measures present in the expression
+   * @param ordinalToBlockIndexMapping measure ordinal to block mapping
+   * @return block indexes
+   */
+  public static int[] getMeasureBlockIndexes(List<QueryMeasure> queryMeasures,
+      List<CarbonMeasure> expressionMeasure, Map<Integer, Integer> ordinalToBlockIndexMapping,
+      Set<CarbonMeasure> filterMeasures) {
+    Set<Integer> measureBlockIndex = new HashSet<Integer>();
+    Set<Integer> filterMeasureOrdinal = getFilterMeasureOrdinal(filterMeasures);
+    for (int i = 0; i < queryMeasures.size(); i++) {
+      if (!filterMeasureOrdinal.contains(queryMeasures.get(i).getMeasure().getOrdinal())) {
+        measureBlockIndex
+            .add(ordinalToBlockIndexMapping.get(queryMeasures.get(i).getMeasure().getOrdinal()));
+      }
+    }
+    for (int i = 0; i < expressionMeasure.size(); i++) {
+      measureBlockIndex.add(ordinalToBlockIndexMapping.get(expressionMeasure.get(i).getOrdinal()));
+    }
+    int[] measureIndexes =
+        ArrayUtils.toPrimitive(measureBlockIndex.toArray(new Integer[measureBlockIndex.size()]));
+    Arrays.sort(measureIndexes);
+    return measureIndexes;
+  }
+
+  /**
+   * Below method will be used to get mapping whether dimension is present in
+   * order by or not
+   *
+   * @param sortedDimensions sort dimension present in order by query
+   * @param queryDimensions  query dimension
+   * @return sort dimension indexes
+   */
+  public static byte[] getSortDimensionIndexes(List<QueryDimension> sortedDimensions,
+      List<QueryDimension> queryDimensions) {
+    byte[] sortedDims = new byte[queryDimensions.size()];
+    int indexOf = 0;
+    for (int i = 0; i < sortedDims.length; i++) {
+      indexOf = sortedDimensions.indexOf(queryDimensions.get(i));
+      if (indexOf > -1) {
+        sortedDims[i] = 1;
+      }
+    }
+    return sortedDims;
+  }
+
+  /**
+   * Below method will be used to get the mapping of block index and its
+   * restructuring info
+   *
+   * @param queryDimensions   query dimension from query model
+   * @param segmentProperties segment properties
+   * @return map of block index to its restructuring info
+   * @throws KeyGenException if problem while key generation
+   */
+  public static Map<Integer, KeyStructureInfo> getColumnGroupKeyStructureInfo(
+      List<QueryDimension> queryDimensions, SegmentProperties segmentProperties)
+      throws KeyGenException {
+    Map<Integer, KeyStructureInfo> rowGroupToItsRSInfo = new HashMap<Integer, KeyStructureInfo>();
+    // get column group id and its ordinal mapping of column group
+    Map<Integer, List<Integer>> columnGroupAndItsOrdinalMappingForQuery =
+        getColumnGroupAndItsOrdinalMapping(queryDimensions);
+    Map<Integer, KeyGenerator> columnGroupAndItsKeygenartor =
+        segmentProperties.getColumnGroupAndItsKeygenartor();
+
+    Iterator<Entry<Integer, List<Integer>>> iterator =
+        columnGroupAndItsOrdinalMappingForQuery.entrySet().iterator();
+    KeyStructureInfo restructureInfos = null;
+    while (iterator.hasNext()) {
+      Entry<Integer, List<Integer>> next = iterator.next();
+      KeyGenerator keyGenerator = columnGroupAndItsKeygenartor.get(next.getKey());
+      restructureInfos = new KeyStructureInfo();
+      // sort the ordinals
+      List<Integer> ordinal = next.getValue();
+      List<Integer> mdKeyOrdinal = new ArrayList<Integer>();
+      // unsorted ordinals in query order
+      List<Integer> mdKeyOrdinalForQuery = new ArrayList<Integer>();
+      for (Integer ord : ordinal) {
+        mdKeyOrdinal.add(segmentProperties.getColumnGroupMdKeyOrdinal(next.getKey(), ord));
+        mdKeyOrdinalForQuery.add(segmentProperties.getColumnGroupMdKeyOrdinal(next.getKey(), ord));
+      }
+      Collections.sort(mdKeyOrdinal);
+      // get the masked byte range for column group
+      int[] maskByteRanges = getMaskedByteRangeBasedOrdinal(mdKeyOrdinal, keyGenerator);
+      // max key for column group
+      byte[] maxKey = getMaxKeyBasedOnOrinal(mdKeyOrdinal, keyGenerator);
+      restructureInfos.setKeyGenerator(keyGenerator);
+      restructureInfos.setMaskByteRanges(maskByteRanges);
+      restructureInfos.setMaxKey(maxKey);
+      restructureInfos.setMdkeyQueryDimensionOrdinal(ArrayUtils
+          .toPrimitive(mdKeyOrdinalForQuery.toArray(new Integer[mdKeyOrdinalForQuery.size()])));
+      rowGroupToItsRSInfo
+          .put(segmentProperties.getDimensionOrdinalToBlockMapping().get(ordinal.get(0)),
+              restructureInfos);
+    }
+    return rowGroupToItsRSInfo;
+  }
+
+  /**
+   * return true if given key is found in array
+   *
+   * @param data
+   * @param key
+   * @return
+   */
+  public static boolean searchInArray(int[] data, int key) {
+    for (int i = 0; i < data.length; i++) {
+      if (key == data[i]) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Below method will be used to create a mapping of column group columns.
+   * The mapping is from column group id to all the dimension ordinals
+   * present in that column group. It will be used during query execution
+   * to create a mask key for the column group dimensions, which is needed
+   * in aggregation and filter queries because column group dimensions are
+   * stored at bit level.
+   */
+  private static Map<Integer, List<Integer>> getColumnGroupAndItsOrdinalMapping(
+      List<QueryDimension> origdimensions) {
+
+    List<QueryDimension> dimensions = new ArrayList<QueryDimension>(origdimensions.size());
+    dimensions.addAll(origdimensions);
+    /**
+     * sort based on column group id
+     */
+    Collections.sort(dimensions, new Comparator<QueryDimension>() {
+
+      @Override public int compare(QueryDimension o1, QueryDimension o2) {
+        return Integer
+            .compare(o1.getDimension().columnGroupId(), o2.getDimension().columnGroupId());
+      }
+    });
+    // list of row groups; this will store all the row group columns
+    Map<Integer, List<Integer>> columnGroupAndItsOrdinalsMapping =
+        new HashMap<Integer, List<Integer>>();
+    // to store a column group
+    List<Integer> currentColumnGroup = null;
+    // current index
+    int index = 0;
+    // previous column group id, to check whether all the columns of a row
+    // group have been selected
+    int prvColumnGroupId = -1;
+    while (index < dimensions.size()) {
+      // if the dimension's group id is not zero and it is same as the previous
+      // column group id, then we need to add the ordinal of that column as it
+      // belongs to the same column group
+      if (dimensions.get(index).getDimension().hasEncoding(Encoding.IMPLICIT)) {
+        index++;
+        continue;
+      } else if (!dimensions.get(index).getDimension().isColumnar()
+          && dimensions.get(index).getDimension().columnGroupId() == prvColumnGroupId
+          && null != currentColumnGroup) {
+        currentColumnGroup.add(dimensions.get(index).getDimension().getOrdinal());
+      }
+
+      // if the dimension is not columnar then it is a column group column
+      else if (!dimensions.get(index).getDimension().isColumnar()) {
+        currentColumnGroup = new ArrayList<Integer>();
+        columnGroupAndItsOrdinalsMapping
+            .put(dimensions.get(index).getDimension().columnGroupId(), currentColumnGroup);
+        currentColumnGroup.add(dimensions.get(index).getDimension().getOrdinal());
+      }
+      // update the row id every time; this is required to group the
+      // columns of the same row group
+      prvColumnGroupId = dimensions.get(index).getDimension().columnGroupId();
+      index++;
+    }
+    return columnGroupAndItsOrdinalsMapping;
+  }
+
+  /**
+   * Below method will be used to get masked byte
+   *
+   * @param data           actual data
+   * @param maxKey         max key
+   * @param maskByteRanges mask byte range
+   * @param byteCount
+   * @return masked byte
+   */
+  public static byte[] getMaskedKey(byte[] data, byte[] maxKey, int[] maskByteRanges,
+      int byteCount) {
+    byte[] maskedKey = new byte[byteCount];
+    int counter = 0;
+    int byteRange = 0;
+    for (int i = 0; i < byteCount; i++) {
+      byteRange = maskByteRanges[i];
+      if (byteRange != -1) {
+        maskedKey[counter++] = (byte) (data[byteRange] & maxKey[byteRange]);
+      }
+    }
+    return maskedKey;
+  }
+
+  /**
+   * Below method will be used to fill the block indexes of the query dimensions,
+   * which will be used in creating the output row. Two lists are passed here,
+   * one storing the dictionary column block indexes and the other the no
+   * dictionary column block indexes, so that both types of dimension block
+   * indexes can be filled in a single iteration.
+   *
+   * @param queryDimensions                  dimensions present in the query
+   * @param columnOrdinalToBlockIndexMapping column ordinal to block index mapping
+   * @param dictionaryDimensionBlockIndex    list to store dictionary column block indexes
+   * @param noDictionaryDimensionBlockIndex  list to store no dictionary block indexes
+   */
+  public static void fillQueryDimensionsBlockIndexes(List<QueryDimension> queryDimensions,
+      Map<Integer, Integer> columnOrdinalToBlockIndexMapping,
+      Set<Integer> dictionaryDimensionBlockIndex, List<Integer> noDictionaryDimensionBlockIndex) {
+    for (QueryDimension queryDimension : queryDimensions) {
+      if (CarbonUtil.hasEncoding(queryDimension.getDimension().getEncoder(), Encoding.DICTIONARY)
+          && queryDimension.getDimension().numberOfChild() == 0) {
+        dictionaryDimensionBlockIndex
+            .add(columnOrdinalToBlockIndexMapping.get(queryDimension.getDimension().getOrdinal()));
+      } else if (
+          !CarbonUtil.hasEncoding(queryDimension.getDimension().getEncoder(), Encoding.IMPLICIT)
+              && queryDimension.getDimension().numberOfChild() == 0) {
+        noDictionaryDimensionBlockIndex
+            .add(columnOrdinalToBlockIndexMapping.get(queryDimension.getDimension().getOrdinal()));
+      }
+    }
+  }
+
+  /**
+   * Below method will be used to resolve the query model.
+   * Resolving sets the actual dimension and measure objects, as only the
+   * column names are passed from the driver to avoid heavy object
+   * serialization.
+   *
+   * @param queryModel query model
+   */
+  public static void resolveQueryModel(QueryModel queryModel) {
+    CarbonMetadata.getInstance().addCarbonTable(queryModel.getTable());
+    // TODO need to load the table from table identifier
+    CarbonTable carbonTable = queryModel.getTable();
+    String tableName =
+        queryModel.getAbsoluteTableIdentifier().getCarbonTableIdentifier().getTableName();
+    // resolve query dimension
+    for (QueryDimension queryDimension : queryModel.getQueryDimension()) {
+      queryDimension
+          .setDimension(carbonTable.getDimensionByName(tableName, queryDimension.getColumnName()));
+    }
+    // resolve query measure
+    for (QueryMeasure queryMeasure : queryModel.getQueryMeasures()) {
+      // in case of count(*) the column name will be count(*), so first check
+      // whether any measure is present; if a measure is present and the first
+      // measure is not the default invisible measure then use that measure,
+      // otherwise use the first dimension as a dummy measure.
+      // Currently a default measure is always added when no measure is present,
+      // so the first condition never becomes false, but the check is kept in
+      // case that behaviour changes in future.
+      if (queryMeasure.getColumnName().equals("count(*)")) {
+        if (carbonTable.getMeasureByTableName(tableName).size() > 0 && !carbonTable
+            .getMeasureByTableName(tableName).get(0).getColName()
+            .equals(CarbonCommonConstants.DEFAULT_INVISIBLE_DUMMY_MEASURE)) {
+          queryMeasure.setMeasure(carbonTable.getMeasureByTableName(tableName).get(0));
+        } else {
+          CarbonMeasure dummyMeasure = new CarbonMeasure(
+              carbonTable.getDimensionByTableName(tableName).get(0).getColumnSchema(), 0);
+          queryMeasure.setMeasure(dummyMeasure);
+        }
+      } else {
+        queryMeasure
+            .setMeasure(carbonTable.getMeasureByName(tableName, queryMeasure.getColumnName()));
+      }
+    }
+  }
+
+  /**
+   * below method will be used to get the indexes of the aggregators whose
+   * type is neither SUM nor AVERAGE
+   *
+   * @param aggType aggregation types
+   * @return indexes in the aggregator list
+   */
+  public static int[] getActualTypeIndex(List<String> aggType) {
+    List<Integer> indexList = new ArrayList<Integer>();
+    for (int i = 0; i < aggType.size(); i++) {
+      if (!CarbonCommonConstants.SUM.equals(aggType.get(i)) && !CarbonCommonConstants.AVERAGE
+          .equals(aggType.get(i))) {
+        indexList.add(i);
+      }
+    }
+    return ArrayUtils.toPrimitive(indexList.toArray(new Integer[indexList.size()]));
+  }
+
+  /**
+   * Below method will be used to get the key structure for the column group
+   *
+   * @param segmentProperties      segment properties
+   * @param dimColumnEvaluatorInfo dimension evaluator info
+   * @return key structure info for column group dimension
+   * @throws KeyGenException
+   */
+  public static KeyStructureInfo getKeyStructureInfo(SegmentProperties segmentProperties,
+      DimColumnResolvedFilterInfo dimColumnEvaluatorInfo) throws KeyGenException {
+    int colGrpId = getColumnGroupId(segmentProperties, dimColumnEvaluatorInfo.getColumnIndex());
+    KeyGenerator keyGenerator = segmentProperties.getColumnGroupAndItsKeygenartor().get(colGrpId);
+    List<Integer> mdKeyOrdinal = new ArrayList<Integer>();
+
+    mdKeyOrdinal.add(segmentProperties
+        .getColumnGroupMdKeyOrdinal(colGrpId, dimColumnEvaluatorInfo.getColumnIndex()));
+    int[] maskByteRanges = QueryUtil.getMaskedByteRangeBasedOrdinal(mdKeyOrdinal, keyGenerator);
+    byte[] maxKey = QueryUtil.getMaxKeyBasedOnOrinal(mdKeyOrdinal, keyGenerator);
+    KeyStructureInfo restructureInfos = new KeyStructureInfo();
+    restructureInfos.setKeyGenerator(keyGenerator);
+    restructureInfos.setMaskByteRanges(maskByteRanges);
+    restructureInfos.setMaxKey(maxKey);
+    return restructureInfos;
+  }
+
+  /**
+   * Below method will be used to get the column group id based on the ordinal
+   *
+   * @param segmentProperties segment properties
+   * @param ordinal           ordinal to be searched
+   * @return column group id
+   */
+  public static int getColumnGroupId(SegmentProperties segmentProperties, int ordinal) {
+    int[][] columnGroups = segmentProperties.getColumnGroups();
+    int colGrpId = -1;
+    for (int i = 0; i < columnGroups.length; i++) {
+      if (columnGroups[i].length > 1) {
+        colGrpId++;
+        if (QueryUtil.searchInArray(columnGroups[i], ordinal)) {
+          break;
+        }
+      }
+    }
+    return colGrpId;
+  }
+
+  /**
+   * Below method will be used to get the map of complex dimension to its query
+   * type, which will be used during query execution to read the complex dimension data
+   *
+   * @param queryDimensions          complex dimensions in query
+   * @param dimensionToBlockIndexMap dimension to block index in file map
+   * @return complex dimension and query type
+   */
+  public static Map<Integer, GenericQueryType> getComplexDimensionsMap(
+      List<QueryDimension> queryDimensions, Map<Integer, Integer> dimensionToBlockIndexMap,
+      int[] eachComplexColumnValueSize, Map<String, Dictionary> columnIdToDictionaryMap,
+      Set<CarbonDimension> filterDimensions) {
+    Map<Integer, GenericQueryType> complexTypeMap = new HashMap<Integer, GenericQueryType>();
+    for (QueryDimension dimension : queryDimensions) {
+      CarbonDimension actualDimension = dimension.getDimension();
+      if (actualDimension.getNumberOfChild() == 0) {
+        continue;
+      }
+      fillParentDetails(dimensionToBlockIndexMap, actualDimension, complexTypeMap,
+          eachComplexColumnValueSize, columnIdToDictionaryMap);
+    }
+    if (null != filterDimensions) {
+      for (CarbonDimension filterDimension : filterDimensions) {
+        // do not fill any details for implicit dimension type
+        if (filterDimension.hasEncoding(Encoding.IMPLICIT)) {
+          continue;
+        }
+        fillParentDetails(dimensionToBlockIndexMap, filterDimension, complexTypeMap,
+            eachComplexColumnValueSize, columnIdToDictionaryMap);
+      }
+    }
+    return complexTypeMap;
+  }
+
+  private static void fillParentDetails(Map<Integer, Integer> dimensionToBlockIndexMap,
+      CarbonDimension dimension, Map<Integer, GenericQueryType> complexTypeMap,
+      int[] eachComplexColumnValueSize, Map<String, Dictionary> columnIdToDictionaryMap) {
+    int parentBlockIndex = dimensionToBlockIndexMap.get(dimension.getOrdinal());
+    GenericQueryType parentQueryType = dimension.getDataType().equals(DataType.ARRAY) ?
+        new ArrayQueryType(dimension.getColName(), dimension.getColName(), parentBlockIndex) :
+        new StructQueryType(dimension.getColName(), dimension.getColName(),
+            dimensionToBlockIndexMap.get(dimension.getOrdinal()));
+    complexTypeMap.put(dimension.getOrdinal(), parentQueryType);
+    parentBlockIndex =
+        fillChildrenDetails(eachComplexColumnValueSize, columnIdToDictionaryMap, parentBlockIndex,
+            dimension, parentQueryType);
+  }
+
+  private static int fillChildrenDetails(int[] eachComplexColumnValueSize,
+      Map<String, Dictionary> columnIdToDictionaryMap, int parentBlockIndex,
+      CarbonDimension dimension, GenericQueryType parentQueryType) {
+    for (int i = 0; i < dimension.getNumberOfChild(); i++) {
+      switch (dimension.getListOfChildDimensions().get(i).getDataType()) {
+        case ARRAY:
+          parentQueryType.addChildren(
+              new ArrayQueryType(dimension.getListOfChildDimensions().get(i).getColName(),
+                  dimension.getColName(), ++parentBlockIndex));
+          break;
+        case STRUCT:
+          parentQueryType.addChildren(
+              new StructQueryType(dimension.getListOfChildDimensions().get(i).getColName(),
+                  dimension.getColName(), ++parentBlockIndex));
+          break;
+        default:
+          boolean isDirectDictionary = CarbonUtil
+              .hasEncoding(dimension.getListOfChildDimensions().get(i).getEncoder(),
+                  Encoding.DIRECT_DICTIONARY);
+          parentQueryType.addChildren(
+              new PrimitiveQueryType(dimension.getListOfChildDimensions().get(i).getColName(),
+                  dimension.getColName(), ++parentBlockIndex,
+                  dimension.getListOfChildDimensions().get(i).getDataType(),
+                  eachComplexColumnValueSize[dimension.getListOfChildDimensions().get(i)
+                      .getComplexTypeOrdinal()], columnIdToDictionaryMap
+                  .get(dimension.getListOfChildDimensions().get(i).getColumnId()),
+                  isDirectDictionary));
+      }
+      if (dimension.getListOfChildDimensions().get(i).getNumberOfChild() > 0) {
+        parentBlockIndex = fillChildrenDetails(eachComplexColumnValueSize, columnIdToDictionaryMap,
+            parentBlockIndex, dimension.getListOfChildDimensions().get(i), parentQueryType);
+      }
+    }
+    return parentBlockIndex;
+  }
+
+  public static void getAllFilterDimensions(FilterResolverIntf filterResolverTree,
+      Set<CarbonDimension> filterDimensions, Set<CarbonMeasure> filterMeasure) {
+    if (null == filterResolverTree) {
+      return;
+    }
+    Expression filterExpression = filterResolverTree.getFilterExpression();
+    addColumnDimensions(filterExpression, filterDimensions, filterMeasure);
+  }
+
+  /**
+   * This method recursively checks whether the given expression contains a
+   * column expression and adds the corresponding dimension (or measure)
+   * instance to the set used while resolving complex filter expressions.
+   */
+  private static void addColumnDimensions(Expression expression,
+      Set<CarbonDimension> filterDimensions, Set<CarbonMeasure> filterMeasure) {
+    if (null != expression && expression instanceof ColumnExpression) {
+      if (((ColumnExpression) expression).isDimension()) {
+        filterDimensions.add(((ColumnExpression) expression).getDimension());
+      } else {
+        filterMeasure.add((CarbonMeasure) ((ColumnExpression) expression).getCarbonColumn());
+      }
+      return;
+    } else if (null != expression) {
+      for (Expression child : expression.getChildren()) {
+        addColumnDimensions(child, filterDimensions, filterMeasure);
+      }
+    }
+  }
+
+  private static Set<Integer> getFilterMeasureOrdinal(Set<CarbonMeasure> filterMeasures) {
+    Set<Integer> filterMeasuresOrdinal = new HashSet<>();
+    for (CarbonMeasure filterMeasure : filterMeasures) {
+      filterMeasuresOrdinal.add(filterMeasure.getOrdinal());
+    }
+    return filterMeasuresOrdinal;
+  }
+
+  private static Set<Integer> getFilterDimensionOrdinal(Set<CarbonDimension> filterDimensions) {
+    Set<Integer> filterDimensionsOrdinal = new HashSet<>();
+    for (CarbonDimension filterDimension : filterDimensions) {
+      filterDimensionsOrdinal.add(filterDimension.getOrdinal());
+      getChildDimensionOrdinal(filterDimension, filterDimensionsOrdinal);
+    }
+    return filterDimensionsOrdinal;
+  }
+
+  /**
+   * Below method will be used to fill the ordinals of the child dimensions
+   * into the filter dimension ordinal set
+   */
+  private static void getChildDimensionOrdinal(CarbonDimension queryDimensions,
+      Set<Integer> filterDimensionsOrdinal) {
+    for (int j = 0; j < queryDimensions.numberOfChild(); j++) {
+      CarbonDimension childDimension = queryDimensions.getListOfChildDimensions().get(j);
+      List<Encoding> encodingList = childDimension.getEncoder();
+      if (childDimension.numberOfChild() > 0) {
+        getChildDimensionOrdinal(childDimension, filterDimensionsOrdinal);
+      } else if (!CarbonUtil.hasEncoding(encodingList, Encoding.DIRECT_DICTIONARY)) {
+        filterDimensionsOrdinal.add(childDimension.getOrdinal());
+      }
+    }
+  }
+}
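
The block index assignment in fillChildrenDetails above is a pre-order walk: every child of a complex column, at any nesting depth, is handed the next index after the one assigned so far. A minimal, self-contained sketch of that numbering scheme, using a hypothetical Node type instead of CarbonData's dimension classes (illustration only, not the project's API):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public final class BlockIndexSketch {

  // Hypothetical stand-in for a complex column: a name plus nested children.
  static final class Node {
    final String name;
    final List<Node> children = new ArrayList<>();
    Node(String name, Node... kids) {
      this.name = name;
      children.addAll(Arrays.asList(kids));
    }
  }

  // Assigns parentIndex + 1, parentIndex + 2, ... to children in pre-order,
  // mirroring the ++parentBlockIndex pattern in fillChildrenDetails.
  static int number(Node node, int parentIndex, Map<String, Integer> out) {
    for (Node child : node.children) {
      out.put(child.name, ++parentIndex);
      if (!child.children.isEmpty()) {
        parentIndex = number(child, parentIndex, out);
      }
    }
    return parentIndex;
  }

  public static void main(String[] args) {
    // e.g. a struct<a: array<int>, b: string> column whose own block index is 3
    Node complexCol = new Node("col",
        new Node("col.a", new Node("col.a.item")),
        new Node("col.b"));
    Map<String, Integer> indexes = new LinkedHashMap<>();
    indexes.put("col", 3);
    number(complexCol, 3, indexes);
    System.out.println(indexes); // {col=3, col.a=4, col.a.item=5, col.b=6}
  }
}

Under that assumed layout, every block of the complex column ends up with a distinct, consecutive index, which is what lets the scanner read the parent and all of its hidden children together.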

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
 
b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
new file mode 100644
index 0000000..67835b6
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/scan/executor/util/RestructureUtil.java
@@ -0,0 +1,140 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.core.scan.executor.util;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.carbondata.core.metadata.Encoding;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.scan.executor.infos.AggregatorInfo;
+import org.apache.carbondata.core.scan.model.QueryDimension;
+import org.apache.carbondata.core.scan.model.QueryMeasure;
+
+/**
+ * Utility class for restructuring
+ */
+public class RestructureUtil {
+
+  /**
+   * Below method will be used to get the updated list of query dimensions.
+   * After restructuring, some dimensions may not be present in the older
+   * table blocks, so we need to select only those query dimensions which are
+   * present in the current table block.
+   *
+   * @param queryDimensions
+   * @param tableBlockDimensions
+   * @return list of query dimensions which are present in the table block
+   */
+  public static List<QueryDimension> getUpdatedQueryDimension(List<QueryDimension> queryDimensions,
+      List<CarbonDimension> tableBlockDimensions, List<CarbonDimension> tableComplexDimension) {
+    List<QueryDimension> presentDimension =
+        new ArrayList<QueryDimension>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
+    // selecting only those dimensions which are present in the table block
+    for (QueryDimension queryDimension : queryDimensions) {
+      if (queryDimension.getDimension().hasEncoding(Encoding.IMPLICIT)) {
+        presentDimension.add(queryDimension);
+      } else {
+        for (CarbonDimension tableDimension : tableBlockDimensions) {
+          if (tableDimension.equals(queryDimension.getDimension())) {
+            presentDimension.add(queryDimension);
+          }
+        }
+      }
+    }
+    for (QueryDimension queryDimension : queryDimensions) {
+      for (CarbonDimension tableDimension : tableComplexDimension) {
+        if (tableDimension.equals(queryDimension.getDimension())) {
+          presentDimension.add(queryDimension);
+        }
+      }
+    }
+    return presentDimension;
+  }
+
+  /**
+   * Below method adds the children of a complex type dimension. Internally a
+   * dimension column is created for each part of a complex dimension, so when
+   * a complex dimension is requested in the query we also need to add its
+   * children, which are hidden from the user. For example, if the complex
+   * dimension is an Array of String[2], three dimensions are stored; when the
+   * user queries the complex (array) type we add its children, read the
+   * respective blocks and build a tuple from all three dimensions.
+   *
+   * @param queryDimensions      current query dimensions
+   * @param tableBlockDimensions dimensions which are present in the table block
+   * @return updated dimensions (after adding complex type children)
+   */
+  public static List<CarbonDimension> addChildrenForComplexTypeDimension(
+      List<CarbonDimension> queryDimensions, List<CarbonDimension> tableBlockDimensions) {
+    List<CarbonDimension> updatedQueryDimension = new ArrayList<CarbonDimension>();
+    int numberOfChildren = 0;
+    for (CarbonDimension queryDimension : queryDimensions) {
+      // if the number of children is zero, it is not a complex dimension,
+      // so add it to the query dimensions directly
+      if (queryDimension.numberOfChild() == 0) {
+        updatedQueryDimension.add(queryDimension);
+      }
+      // if the number of children is more than zero, add all its children
+      numberOfChildren = queryDimension.getOrdinal() + queryDimension.numberOfChild();
+      for (int j = queryDimension.getOrdinal(); j < numberOfChildren; j++) {
+        updatedQueryDimension.add(tableBlockDimensions.get(j));
+      }
+    }
+    return updatedQueryDimension;
+  }
+
+  /**
+   * Below method will be used to build the aggregator info object. It sets
+   * the properties which are extracted from the query measures and the
+   * current block measures.
+   *
+   * @param queryMeasures        measures present in query
+   * @param currentBlockMeasures current block measures
+   * @return aggregator info
+   */
+  public static AggregatorInfo getAggregatorInfos(List<QueryMeasure> queryMeasures,
+      List<CarbonMeasure> currentBlockMeasures) {
+    AggregatorInfo aggregatorInfos = new AggregatorInfo();
+    int numberOfMeasureInQuery = queryMeasures.size();
+    int[] measureOrdinals = new int[numberOfMeasureInQuery];
+    Object[] defaultValues = new Object[numberOfMeasureInQuery];
+    boolean[] measureExistsInCurrentBlock = new boolean[numberOfMeasureInQuery];
+    int index = 0;
+    for (QueryMeasure queryMeasure : queryMeasures) {
+      measureOrdinals[index] = queryMeasure.getMeasure().getOrdinal();
+      // if the query measure exists in the current block measures,
+      // mark it as present; otherwise store the measure's default value
+      if (currentBlockMeasures.contains(queryMeasure.getMeasure())) {
+        measureExistsInCurrentBlock[index] = true;
+      } else {
+        defaultValues[index] = queryMeasure.getMeasure().getDefaultValue();
+      }
+      index++;
+    }
+    aggregatorInfos.setDefaultValues(defaultValues);
+    aggregatorInfos.setMeasureOrdinals(measureOrdinals);
+    aggregatorInfos.setMeasureExists(measureExistsInCurrentBlock);
+    return aggregatorInfos;
+  }
+}
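
The restructure handling in RestructureUtil reduces to one decision per query measure: if the measure exists in the current block, scan it; if it was added after the block was written, serve its default value instead. A small self-contained illustration of that bookkeeping, using plain strings in place of CarbonMeasure and a hypothetical default value (sketch only, not the project's API):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public final class MeasureRestructureSketch {
  public static void main(String[] args) {
    // measures requested by the query vs. measures physically present in an old block
    List<String> queryMeasures = Arrays.asList("sales", "profit", "discount");
    Set<String> blockMeasures = new HashSet<>(Arrays.asList("sales", "profit"));

    boolean[] measureExists = new boolean[queryMeasures.size()];
    Object[] defaultValues = new Object[queryMeasures.size()];

    for (int i = 0; i < queryMeasures.size(); i++) {
      if (blockMeasures.contains(queryMeasures.get(i))) {
        measureExists[i] = true;   // present in this block: read it while scanning
      } else {
        defaultValues[i] = 0L;     // missing in this block: serve a (hypothetical) default
      }
    }

    for (int i = 0; i < queryMeasures.size(); i++) {
      System.out.println(queryMeasures.get(i) + " -> "
          + (measureExists[i] ? "scan block" : "use default " + defaultValues[i]));
    }
  }
}

The parallel boolean and default-value arrays correspond to what getAggregatorInfos stores on AggregatorInfo, so older blocks can still answer a query that references newly added measures.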

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/scan/expression/BinaryExpression.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/scan/expression/BinaryExpression.java
 
b/core/src/main/java/org/apache/carbondata/core/scan/expression/BinaryExpression.java
new file mode 100644
index 0000000..8885841
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/scan/expression/BinaryExpression.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.scan.expression;
+
+public abstract class BinaryExpression extends Expression {
+
+  private static final long serialVersionUID = 1L;
+  protected Expression left;
+  protected Expression right;
+
+  public BinaryExpression(Expression left, Expression right) {
+    this.left = left;
+    this.right = right;
+    children.add(left);
+    children.add(right);
+  }
+
+  public Expression getLeft() {
+    return left;
+  }
+
+  public Expression getRight() {
+    return right;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/scan/expression/ColumnExpression.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/scan/expression/ColumnExpression.java
 
b/core/src/main/java/org/apache/carbondata/core/scan/expression/ColumnExpression.java
new file mode 100644
index 0000000..4637abb
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/scan/expression/ColumnExpression.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.scan.expression;
+
+import org.apache.carbondata.core.metadata.DataType;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
+import org.apache.carbondata.core.scan.filter.intf.ExpressionType;
+import org.apache.carbondata.core.scan.filter.intf.RowIntf;
+
+public class ColumnExpression extends LeafExpression {
+
+  private static final long serialVersionUID = 1L;
+
+  private String columnName;
+
+  private boolean isDimension;
+
+  private int colIndex = -1;
+
+  private DataType dataType;
+
+  private CarbonDimension dimension;
+
+  private CarbonColumn carbonColumn;
+
+  public ColumnExpression(String columnName, DataType dataType) {
+    this.columnName = columnName;
+    this.dataType = dataType;
+  }
+
+  public CarbonDimension getDimension() {
+    return dimension;
+  }
+
+  public void setDimension(CarbonDimension dimension) {
+    this.dimension = dimension;
+  }
+
+  public String getColumnName() {
+    return columnName;
+  }
+
+  public void setColumnName(String columnName) {
+    this.columnName = columnName;
+  }
+
+  public boolean isDimension() {
+    return isDimension;
+  }
+
+  public void setDimension(boolean isDimension) {
+    this.isDimension = isDimension;
+  }
+
+  public int getColIndex() {
+    return colIndex;
+  }
+
+  public void setColIndex(int colIndex) {
+    this.colIndex = colIndex;
+  }
+
+  public DataType getDataType() {
+    return dataType;
+  }
+
+  public void setDataType(DataType dataType) {
+    this.dataType = dataType;
+  }
+
+  @Override public ExpressionResult evaluate(RowIntf value) {
+    ExpressionResult expressionResult =
+        new ExpressionResult(dataType, (null == value ? null : value.getVal(colIndex)));
+    return expressionResult;
+  }
+
+  @Override public ExpressionType getFilterExpressionType() {
+    // TODO Auto-generated method stub
+    return null;
+  }
+
+  @Override public String getString() {
+    return "ColumnExpression(" + columnName + ')';
+  }
+
+  public CarbonColumn getCarbonColumn() {
+    return carbonColumn;
+  }
+
+  public void setCarbonColumn(CarbonColumn carbonColumn) {
+    this.carbonColumn = carbonColumn;
+  }
+
+}
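
ColumnExpression is the leaf of the filter expression tree: once the planner has resolved colIndex, evaluate is just a lookup of that position in the incoming row. A stand-alone sketch of that binding, with a hypothetical Row interface in place of RowIntf (illustration only):

public final class LeafEvaluationSketch {

  // Hypothetical row abstraction: one value per resolved column index.
  interface Row {
    Object getVal(int index);
  }

  // Simplified leaf: once colIndex is resolved, evaluation is a positional lookup.
  static final class ColumnRef {
    private final int colIndex;
    ColumnRef(int colIndex) {
      this.colIndex = colIndex;
    }
    Object evaluate(Row row) {
      return row == null ? null : row.getVal(colIndex);
    }
  }

  public static void main(String[] args) {
    Object[] values = { "india", 27, 3.14 };
    Row row = index -> values[index];
    System.out.println(new ColumnRef(1).evaluate(row)); // 27
  }
}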

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/scan/expression/Expression.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/scan/expression/Expression.java 
b/core/src/main/java/org/apache/carbondata/core/scan/expression/Expression.java
new file mode 100644
index 0000000..a816643
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/scan/expression/Expression.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.scan.expression;
+
+import java.io.Serializable;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.scan.expression.exception.FilterIllegalMemberException;
+import org.apache.carbondata.core.scan.expression.exception.FilterUnsupportedException;
+import org.apache.carbondata.core.scan.filter.intf.ExpressionType;
+import org.apache.carbondata.core.scan.filter.intf.RowIntf;
+
+public abstract class Expression implements Serializable {
+
+  private static final long serialVersionUID = -7568676723039530713L;
+  protected List<Expression> children =
+      new ArrayList<Expression>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
+
+  public abstract ExpressionResult evaluate(RowIntf value)
+      throws FilterUnsupportedException, FilterIllegalMemberException;
+
+  public abstract ExpressionType getFilterExpressionType();
+
+  public List<Expression> getChildren() {
+    return children;
+  }
+
+  public abstract String getString();
+
+}
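
Expression keeps its operands in the children list, so any filter tree can be walked with a plain recursion; this is the same pattern addColumnDimensions uses earlier in this commit to collect the filter columns. A self-contained sketch of such a traversal over a hypothetical, stripped-down expression node (not CarbonData's classes):

import java.util.ArrayList;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

public final class ExpressionWalkSketch {

  // Hypothetical expression node: only column leaves carry a name.
  static class Expr {
    final String columnName;                    // non-null only for column leaves
    final List<Expr> children = new ArrayList<>();
    Expr(String columnName, Expr... kids) {
      this.columnName = columnName;
      for (Expr kid : kids) {
        children.add(kid);
      }
    }
  }

  // Recursively collect every referenced column, mirroring addColumnDimensions.
  static void collectColumns(Expr expr, Set<String> out) {
    if (expr == null) {
      return;
    }
    if (expr.columnName != null) {
      out.add(expr.columnName);
      return;
    }
    for (Expr child : expr.children) {
      collectColumns(child, out);
    }
  }

  public static void main(String[] args) {
    // (country = 'india') AND (salary > 1000), built from hypothetical nodes
    Expr filter = new Expr(null,
        new Expr(null, new Expr("country"), new Expr(null)),
        new Expr(null, new Expr("salary"), new Expr(null)));
    Set<String> columns = new LinkedHashSet<>();
    collectColumns(filter, columns);
    System.out.println(columns); // [country, salary]
  }
}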
