Refactor org.carbondata.query package (#692)

* fix style

* move query.carbon package one level up

* rename org.carbondata.query to org.carbondata.scan

* refactor


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: 
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/1c725f5b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/1c725f5b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/1c725f5b

Branch: refs/heads/master
Commit: 1c725f5ba2e95e82e4922e7a8e0aa81cdc671a80
Parents: 2936050
Author: Jacky Li <jacky.li...@huawei.com>
Authored: Sun Jun 19 02:30:21 2016 +0800
Committer: Ravindra Pesala <ravi.pes...@gmail.com>
Committed: Sun Jun 19 00:00:21 2016 +0530

----------------------------------------------------------------------
 .../chunk/DimensionColumnDataChunk.java         |    2 +-
 .../impl/ColumnGroupDimensionDataChunk.java     |    2 +-
 .../impl/FixedLengthDimensionDataChunk.java     |    2 +-
 .../impl/VariableLengthDimensionDataChunk.java  |    2 +-
 .../org/carbondata/core/util/CarbonUtil.java    |    1 -
 .../core/util/DataFileFooterConverter.java      |  416 ++++++
 .../collector/ScannedResultCollector.java       |   43 -
 .../impl/ListBasedResultCollector.java          |  212 ---
 .../query/carbon/executor/QueryExecutor.java    |   40 -
 .../carbon/executor/QueryExecutorFactory.java   |   38 -
 .../exception/QueryExecutionException.java      |   96 --
 .../executor/impl/AbstractQueryExecutor.java    |  403 ------
 .../executor/impl/DetailQueryExecutor.java      |   46 -
 .../impl/DetailRawRecordQueryExecutor.java      |   24 -
 .../executor/impl/QueryExecutorProperties.java  |   78 --
 .../carbon/executor/infos/AggregatorInfo.java   |  149 ---
 .../executor/infos/BlockExecutionInfo.java      |  611 ---------
 .../carbon/executor/infos/KeyStructureInfo.java |  125 --
 .../query/carbon/executor/infos/SortInfo.java   |  125 --
 .../internal/InternalQueryExecutor.java         |   47 -
 .../query/carbon/executor/util/QueryUtil.java   |  718 ----------
 .../carbon/executor/util/RestructureUtil.java   |  128 --
 .../query/carbon/model/CarbonQueryPlan.java     |  243 ----
 .../carbon/model/DimensionAggregatorInfo.java   |  113 --
 .../query/carbon/model/QueryColumn.java         |  109 --
 .../query/carbon/model/QueryDimension.java      |   57 -
 .../query/carbon/model/QueryMeasure.java        |   60 -
 .../query/carbon/model/QueryModel.java          |  516 --------
 .../query/carbon/model/QuerySchemaInfo.java     |   86 --
 .../query/carbon/model/SortOrderType.java       |   57 -
 .../processor/AbstractDataBlockIterator.java    |  126 --
 .../carbon/processor/BlockletIterator.java      |   88 --
 .../carbon/processor/BlocksChunkHolder.java     |  125 --
 .../processor/impl/DataBlockIteratorImpl.java   |   56 -
 .../carbon/result/AbstractScannedResult.java    |  347 -----
 .../query/carbon/result/BatchRawResult.java     |   43 -
 .../query/carbon/result/BatchResult.java        |   86 --
 .../carbon/result/ListBasedResultWrapper.java   |   38 -
 .../carbondata/query/carbon/result/Result.java  |   70 -
 .../FixedLengthKeyResultComparator.java         |   71 -
 .../VariableLengthKeyResultComparator.java      |   80 --
 .../result/impl/FilterQueryScannedResult.java   |  128 --
 .../carbon/result/impl/ListBasedResult.java     |  153 ---
 .../impl/NonFilterQueryScannedResult.java       |  109 --
 .../AbstractDetailQueryResultIterator.java      |  134 --
 .../ChunkBasedDetailResultIterator.java         |   75 --
 .../result/iterator/ChunkRowIterator.java       |   79 --
 .../iterator/DetailQueryResultIterator.java     |   84 --
 .../iterator/MemoryBasedResultIterator.java     |   51 -
 .../result/iterator/RawResultIterator.java      |  167 ---
 .../preparator/QueryResultPreparator.java       |   10 -
 .../impl/AbstractQueryResultPreparator.java     |   87 --
 .../impl/DetailQueryResultPreparatorImpl.java   |  139 --
 .../impl/RawQueryResultPreparatorImpl.java      |  127 --
 .../carbon/scanner/AbstractBlockletScanner.java |   62 -
 .../query/carbon/scanner/BlockletScanner.java   |   41 -
 .../carbon/scanner/impl/FilterScanner.java      |  174 ---
 .../carbon/scanner/impl/NonFilterScanner.java   |   37 -
 .../query/carbon/util/DataTypeUtil.java         |  175 ---
 .../query/carbon/wrappers/ByteArrayWrapper.java |  202 ---
 .../carbonfilterinterface/ExpressionType.java   |   44 -
 .../FilterExecuterType.java                     |   28 -
 .../query/carbonfilterinterface/RowImpl.java    |   44 -
 .../query/carbonfilterinterface/RowIntf.java    |   31 -
 .../complex/querytypes/GenericQueryType.java    |   73 --
 .../evaluators/DimColumnExecuterFilterInfo.java |   32 -
 .../evaluators/FilterProcessorPlaceHolder.java  |   24 -
 .../query/expression/BinaryExpression.java      |   59 -
 .../query/expression/ColumnExpression.java      |  114 --
 .../carbondata/query/expression/DataType.java   |   34 -
 .../carbondata/query/expression/Expression.java |   51 -
 .../query/expression/ExpressionResult.java      |  413 ------
 .../query/expression/LeafExpression.java        |   24 -
 .../query/expression/LiteralExpression.java     |   68 -
 .../query/expression/UnaryExpression.java       |   33 -
 .../query/expression/UnknownExpression.java     |    9 -
 .../expression/arithmetic/AddExpression.java    |   81 --
 .../arithmetic/BinaryArithmeticExpression.java  |   34 -
 .../expression/arithmetic/DivideExpression.java |   81 --
 .../arithmetic/MultiplyExpression.java          |   83 --
 .../arithmetic/SubstractExpression.java         |   83 --
 .../BinaryConditionalExpression.java            |   37 -
 .../conditional/ConditionalExpression.java      |   43 -
 .../conditional/EqualToExpression.java          |   95 --
 .../GreaterThanEqualToExpression.java           |   85 --
 .../conditional/GreaterThanExpression.java      |   88 --
 .../expression/conditional/InExpression.java    |   99 --
 .../conditional/LessThanEqualToExpression.java  |   88 --
 .../conditional/LessThanExpression.java         |   90 --
 .../expression/conditional/ListExpression.java  |   57 -
 .../conditional/NotEqualsExpression.java        |   93 --
 .../expression/conditional/NotInExpression.java |  100 --
 .../exception/FilterUnsupportedException.java   |   93 --
 .../query/expression/logical/AndExpression.java |   63 -
 .../logical/BinaryLogicalExpression.java        |  127 --
 .../query/expression/logical/NotExpression.java |   58 -
 .../query/expression/logical/OrExpression.java  |   60 -
 .../filter/executer/AndFilterExecuterImpl.java  |   62 -
 .../executer/ColGroupFilterExecuterImpl.java    |  190 ---
 .../executer/ExcludeFilterExecuterImpl.java     |  206 ---
 .../query/filter/executer/FilterExecuter.java   |   45 -
 .../executer/IncludeFilterExecuterImpl.java     |  225 ----
 .../filter/executer/OrFilterExecuterImpl.java   |   52 -
 .../executer/RestructureFilterExecuterImpl.java |   55 -
 .../executer/RowLevelFilterExecuterImpl.java    |  331 -----
 .../RowLevelRangeGrtThanFiterExecuterImpl.java  |   66 -
 ...elRangeGrtrThanEquaToFilterExecuterImpl.java |   66 -
 ...velRangeLessThanEqualFilterExecuterImpl.java |   66 -
 .../RowLevelRangeLessThanFiterExecuterImpl.java |   65 -
 .../RowLevelRangeTypeExecuterFacory.java        |   90 --
 .../filter/resolver/AndFilterResolverImpl.java  |   51 -
 .../resolver/ConditionalFilterResolverImpl.java |  239 ----
 .../filter/resolver/FilterResolverIntf.java     |   98 --
 .../resolver/LogicalFilterResolverImpl.java     |  109 --
 .../resolver/RestructureFilterResolverImpl.java |  210 ---
 .../resolver/RowLevelFilterResolverImpl.java    |  143 --
 .../RowLevelRangeFilterResolverImpl.java        |  232 ----
 .../metadata/FilterResolverMetadata.java        |   62 -
 .../DimColumnResolvedFilterInfo.java            |  206 ---
 .../MeasureColumnResolvedFilterInfo.java        |  105 --
 .../visitable/ResolvedFilterInfoVisitable.java  |   38 -
 .../visitor/CustomTypeDictionaryVisitor.java    |   80 --
 .../visitor/DictionaryColumnVisitor.java        |   58 -
 .../visitor/FilterInfoTypeVisitorFactory.java   |   45 -
 .../visitor/NoDictionaryTypeVisitor.java        |   56 -
 .../visitor/ResolvedFilterInfoVisitorIntf.java  |   40 -
 .../filters/FilterExpressionProcessor.java      |  361 -----
 .../query/filters/FilterProcessor.java          |   60 -
 .../filters/measurefilter/util/FilterUtil.java  | 1234 ------------------
 .../schema/metadata/DimColumnFilterInfo.java    |   63 -
 .../query/util/DataFileFooterConverter.java     |  418 ------
 .../scan/collector/ScannedResultCollector.java  |   43 +
 .../impl/ListBasedResultCollector.java          |  212 +++
 .../carbondata/scan/executor/QueryExecutor.java |   40 +
 .../scan/executor/QueryExecutorFactory.java     |   38 +
 .../exception/QueryExecutionException.java      |   96 ++
 .../executor/impl/AbstractQueryExecutor.java    |  403 ++++++
 .../scan/executor/impl/DetailQueryExecutor.java |   46 +
 .../impl/DetailRawRecordQueryExecutor.java      |   24 +
 .../executor/impl/QueryExecutorProperties.java  |   78 ++
 .../scan/executor/infos/AggregatorInfo.java     |  149 +++
 .../scan/executor/infos/BlockExecutionInfo.java |  611 +++++++++
 .../scan/executor/infos/KeyStructureInfo.java   |  125 ++
 .../scan/executor/infos/SortInfo.java           |  125 ++
 .../scan/executor/util/QueryUtil.java           |  718 ++++++++++
 .../scan/executor/util/RestructureUtil.java     |  128 ++
 .../scan/expression/BinaryExpression.java       |   59 +
 .../scan/expression/ColumnExpression.java       |  112 ++
 .../carbondata/scan/expression/DataType.java    |   34 +
 .../carbondata/scan/expression/Expression.java  |   48 +
 .../scan/expression/ExpressionResult.java       |  413 ++++++
 .../scan/expression/LeafExpression.java         |   24 +
 .../scan/expression/LiteralExpression.java      |   68 +
 .../scan/expression/UnaryExpression.java        |   33 +
 .../scan/expression/UnknownExpression.java      |    9 +
 .../expression/arithmetic/AddExpression.java    |   81 ++
 .../arithmetic/BinaryArithmeticExpression.java  |   34 +
 .../expression/arithmetic/DivideExpression.java |   81 ++
 .../arithmetic/MultiplyExpression.java          |   83 ++
 .../arithmetic/SubstractExpression.java         |   83 ++
 .../BinaryConditionalExpression.java            |   37 +
 .../conditional/ConditionalExpression.java      |   43 +
 .../conditional/EqualToExpression.java          |   95 ++
 .../GreaterThanEqualToExpression.java           |   85 ++
 .../conditional/GreaterThanExpression.java      |   89 ++
 .../expression/conditional/InExpression.java    |   99 ++
 .../conditional/LessThanEqualToExpression.java  |   88 ++
 .../conditional/LessThanExpression.java         |   90 ++
 .../expression/conditional/ListExpression.java  |   57 +
 .../conditional/NotEqualsExpression.java        |   93 ++
 .../expression/conditional/NotInExpression.java |  100 ++
 .../exception/FilterUnsupportedException.java   |   93 ++
 .../scan/expression/logical/AndExpression.java  |   63 +
 .../logical/BinaryLogicalExpression.java        |  127 ++
 .../scan/expression/logical/NotExpression.java  |   58 +
 .../scan/expression/logical/OrExpression.java   |   60 +
 .../scan/filter/DimColumnFilterInfo.java        |   61 +
 .../scan/filter/FilterExpressionProcessor.java  |  360 +++++
 .../carbondata/scan/filter/FilterProcessor.java |   60 +
 .../org/carbondata/scan/filter/FilterUtil.java  | 1233 +++++++++++++++++
 .../scan/filter/GenericQueryType.java           |   73 ++
 .../filter/executer/AndFilterExecuterImpl.java  |   62 +
 .../executer/ColGroupFilterExecuterImpl.java    |  190 +++
 .../executer/DimColumnExecuterFilterInfo.java   |   32 +
 .../executer/ExcludeFilterExecuterImpl.java     |  205 +++
 .../scan/filter/executer/FilterExecuter.java    |   45 +
 .../executer/IncludeFilterExecuterImpl.java     |  224 ++++
 .../filter/executer/OrFilterExecuterImpl.java   |   52 +
 .../executer/RestructureFilterExecuterImpl.java |   55 +
 .../executer/RowLevelFilterExecuterImpl.java    |  331 +++++
 .../RowLevelRangeGrtThanFiterExecuterImpl.java  |   66 +
 ...elRangeGrtrThanEquaToFilterExecuterImpl.java |   66 +
 ...velRangeLessThanEqualFilterExecuterImpl.java |   66 +
 .../RowLevelRangeLessThanFiterExecuterImpl.java |   65 +
 .../RowLevelRangeTypeExecuterFacory.java        |   90 ++
 .../scan/filter/intf/ExpressionType.java        |   44 +
 .../scan/filter/intf/FilterExecuterType.java    |   28 +
 .../carbondata/scan/filter/intf/RowImpl.java    |   44 +
 .../carbondata/scan/filter/intf/RowIntf.java    |   31 +
 .../filter/resolver/AndFilterResolverImpl.java  |   51 +
 .../resolver/ConditionalFilterResolverImpl.java |  239 ++++
 .../filter/resolver/FilterResolverIntf.java     |   98 ++
 .../resolver/LogicalFilterResolverImpl.java     |  110 ++
 .../resolver/RestructureFilterResolverImpl.java |  211 +++
 .../resolver/RowLevelFilterResolverImpl.java    |  141 ++
 .../RowLevelRangeFilterResolverImpl.java        |  233 ++++
 .../metadata/FilterResolverMetadata.java        |   62 +
 .../DimColumnResolvedFilterInfo.java            |  206 +++
 .../MeasureColumnResolvedFilterInfo.java        |  105 ++
 .../visitable/ResolvedFilterInfoVisitable.java  |   38 +
 .../visitor/CustomTypeDictionaryVisitor.java    |   80 ++
 .../visitor/DictionaryColumnVisitor.java        |   58 +
 .../visitor/FilterInfoTypeVisitorFactory.java   |   45 +
 .../visitor/NoDictionaryTypeVisitor.java        |   56 +
 .../visitor/ResolvedFilterInfoVisitorIntf.java  |   40 +
 .../carbondata/scan/model/CarbonQueryPlan.java  |  243 ++++
 .../org/carbondata/scan/model/QueryColumn.java  |  109 ++
 .../carbondata/scan/model/QueryDimension.java   |   58 +
 .../org/carbondata/scan/model/QueryMeasure.java |   61 +
 .../org/carbondata/scan/model/QueryModel.java   |  516 ++++++++
 .../carbondata/scan/model/QuerySchemaInfo.java  |   86 ++
 .../carbondata/scan/model/SortOrderType.java    |   57 +
 .../processor/AbstractDataBlockIterator.java    |  126 ++
 .../scan/processor/BlockletIterator.java        |   88 ++
 .../scan/processor/BlocksChunkHolder.java       |  125 ++
 .../processor/impl/DataBlockIteratorImpl.java   |   56 +
 .../scan/result/AbstractScannedResult.java      |  347 +++++
 .../carbondata/scan/result/BatchRawResult.java  |   43 +
 .../org/carbondata/scan/result/BatchResult.java |   86 ++
 .../scan/result/ListBasedResultWrapper.java     |   57 +
 .../java/org/carbondata/scan/result/Result.java |   70 +
 .../result/impl/FilterQueryScannedResult.java   |  128 ++
 .../scan/result/impl/ListBasedResult.java       |  153 +++
 .../impl/NonFilterQueryScannedResult.java       |  109 ++
 .../AbstractDetailQueryResultIterator.java      |  128 ++
 .../scan/result/iterator/ChunkRowIterator.java  |   79 ++
 .../iterator/DetailQueryResultIterator.java     |   84 ++
 .../scan/result/iterator/RawResultIterator.java |  167 +++
 .../preparator/QueryResultPreparator.java       |   10 +
 .../impl/AbstractQueryResultPreparator.java     |   87 ++
 .../impl/DetailQueryResultPreparatorImpl.java   |  139 ++
 .../impl/RawQueryResultPreparatorImpl.java      |  127 ++
 .../scan/scanner/AbstractBlockletScanner.java   |   62 +
 .../scan/scanner/BlockletScanner.java           |   41 +
 .../scan/scanner/impl/FilterScanner.java        |  174 +++
 .../scan/scanner/impl/NonFilterScanner.java     |   37 +
 .../org/carbondata/scan/util/DataTypeUtil.java  |  175 +++
 .../scan/wrappers/ByteArrayWrapper.java         |  202 +++
 .../org/carbondata/query/QueryExecutor_UT.java  |   35 -
 .../carbon/executor/util/QueryUtilTest.java     |  133 --
 .../org/carbondata/scan/QueryExecutor_UT.java   |   35 +
 .../scan/executor/util/QueryUtilTest.java       |  133 ++
 dev/findbugs-exclude.xml                        |    4 +-
 .../carbondata/hadoop/CarbonInputFormat.java    |   16 +-
 .../carbondata/hadoop/CarbonRecordReader.java   |   10 +-
 .../hadoop/util/CarbonInputFormatUtil.java      |   14 +-
 .../hadoop/ft/CarbonInputFormat_FT.java         |   11 +-
 .../hadoop/ft/CarbonInputMapperTest.java        |   10 +-
 .../test/util/ObjectSerializationUtilTest.java  |   10 +-
 .../spark/merger/CarbonCompactionExecutor.java  |   16 +-
 .../spark/merger/RowResultMerger.java           |    4 +-
 .../spark/merger/TupleConversionAdapter.java    |    2 +-
 .../spark/partition/api/DataPartitioner.java    |    2 +-
 .../api/impl/QueryPartitionHelper.java          |    2 +-
 .../api/impl/SampleDataPartitionerImpl.java     |    2 +-
 .../readsupport/SparkRowReadSupportImpl.java    |    2 +-
 .../carbondata/spark/util/CarbonQueryUtil.java  |    2 +-
 .../apache/spark/sql/CarbonBoundReference.scala |    2 +-
 .../sql/CarbonDatasourceHadoopRelation.scala    |    2 +-
 .../spark/sql/CarbonDictionaryDecoder.scala     |    2 +-
 .../org/apache/spark/sql/CarbonOperators.scala  |    2 +-
 .../spark/sql/SparkUnknownExpression.scala      |    8 +-
 .../org/carbondata/spark/CarbonFilters.scala    |    6 +-
 .../carbondata/spark/rdd/CarbonMergerRDD.scala  |    2 +-
 .../carbondata/spark/rdd/CarbonScanRDD.scala    |   11 +-
 .../carbondata/spark/util/CarbonScalaUtil.scala |    2 +-
 276 files changed, 15220 insertions(+), 15680 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/DimensionColumnDataChunk.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/DimensionColumnDataChunk.java
 
b/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/DimensionColumnDataChunk.java
index e3371ac..75c29d2 100644
--- 
a/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/DimensionColumnDataChunk.java
+++ 
b/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/DimensionColumnDataChunk.java
@@ -18,7 +18,7 @@
  */
 package org.carbondata.core.carbon.datastore.chunk;
 
-import org.carbondata.query.carbon.executor.infos.KeyStructureInfo;
+import org.carbondata.scan.executor.infos.KeyStructureInfo;
 
 /**
  * Interface for dimension column chunk.

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/ColumnGroupDimensionDataChunk.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/ColumnGroupDimensionDataChunk.java
 
b/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/ColumnGroupDimensionDataChunk.java
index 55c38d3..07d2e2e 100644
--- 
a/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/ColumnGroupDimensionDataChunk.java
+++ 
b/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/ColumnGroupDimensionDataChunk.java
@@ -20,7 +20,7 @@ package org.carbondata.core.carbon.datastore.chunk.impl;
 
 import org.carbondata.core.carbon.datastore.chunk.DimensionChunkAttributes;
 import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
-import org.carbondata.query.carbon.executor.infos.KeyStructureInfo;
+import org.carbondata.scan.executor.infos.KeyStructureInfo;
 
 /**
  * This class is holder of the dimension column chunk data of the fixed length

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/FixedLengthDimensionDataChunk.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/FixedLengthDimensionDataChunk.java
 
b/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/FixedLengthDimensionDataChunk.java
index 3a544a4..08ae703 100644
--- 
a/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/FixedLengthDimensionDataChunk.java
+++ 
b/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/FixedLengthDimensionDataChunk.java
@@ -20,7 +20,7 @@ package org.carbondata.core.carbon.datastore.chunk.impl;
 
 import org.carbondata.core.carbon.datastore.chunk.DimensionChunkAttributes;
 import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
-import org.carbondata.query.carbon.executor.infos.KeyStructureInfo;
+import org.carbondata.scan.executor.infos.KeyStructureInfo;
 
 /**
  * This class is holder of the dimension column chunk data of the fixed length

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/VariableLengthDimensionDataChunk.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/VariableLengthDimensionDataChunk.java
 
b/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/VariableLengthDimensionDataChunk.java
index 9ce3257..51306b5 100644
--- 
a/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/VariableLengthDimensionDataChunk.java
+++ 
b/core/src/main/java/org/carbondata/core/carbon/datastore/chunk/impl/VariableLengthDimensionDataChunk.java
@@ -4,7 +4,7 @@ import java.util.List;
 
 import org.carbondata.core.carbon.datastore.chunk.DimensionChunkAttributes;
 import org.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
-import org.carbondata.query.carbon.executor.infos.KeyStructureInfo;
+import org.carbondata.scan.executor.infos.KeyStructureInfo;
 
 /**
  * This class is holder of the dimension column chunk data of the variable

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/carbondata/core/util/CarbonUtil.java 
b/core/src/main/java/org/carbondata/core/util/CarbonUtil.java
index 2772362..42628b2 100644
--- a/core/src/main/java/org/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/carbondata/core/util/CarbonUtil.java
@@ -71,7 +71,6 @@ import org.carbondata.core.metadata.SliceMetaData;
 import org.carbondata.core.metadata.ValueEncoderMeta;
 import org.carbondata.core.reader.CarbonFooterReader;
 import org.carbondata.core.vo.ColumnGroupModel;
-import org.carbondata.query.util.DataFileFooterConverter;
 
 import org.apache.commons.lang.ArrayUtils;
 import org.apache.hadoop.conf.Configuration;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/core/util/DataFileFooterConverter.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/carbondata/core/util/DataFileFooterConverter.java 
b/core/src/main/java/org/carbondata/core/util/DataFileFooterConverter.java
new file mode 100644
index 0000000..6165731
--- /dev/null
+++ b/core/src/main/java/org/carbondata/core/util/DataFileFooterConverter.java
@@ -0,0 +1,416 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.carbondata.core.util;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.ObjectInputStream;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.BitSet;
+import java.util.Iterator;
+import java.util.List;
+
+import org.carbondata.common.logging.LogService;
+import org.carbondata.common.logging.LogServiceFactory;
+import org.carbondata.core.carbon.metadata.blocklet.BlockletInfo;
+import org.carbondata.core.carbon.metadata.blocklet.DataFileFooter;
+import org.carbondata.core.carbon.metadata.blocklet.SegmentInfo;
+import 
org.carbondata.core.carbon.metadata.blocklet.compressor.ChunkCompressorMeta;
+import 
org.carbondata.core.carbon.metadata.blocklet.compressor.CompressionCodec;
+import org.carbondata.core.carbon.metadata.blocklet.datachunk.DataChunk;
+import org.carbondata.core.carbon.metadata.blocklet.datachunk.PresenceMeta;
+import org.carbondata.core.carbon.metadata.blocklet.index.BlockletBTreeIndex;
+import org.carbondata.core.carbon.metadata.blocklet.index.BlockletIndex;
+import org.carbondata.core.carbon.metadata.blocklet.index.BlockletMinMaxIndex;
+import org.carbondata.core.carbon.metadata.blocklet.sort.SortState;
+import org.carbondata.core.carbon.metadata.datatype.DataType;
+import org.carbondata.core.carbon.metadata.encoder.Encoding;
+import org.carbondata.core.carbon.metadata.schema.table.column.ColumnSchema;
+import org.carbondata.core.datastorage.store.FileHolder;
+import org.carbondata.core.datastorage.store.impl.FileFactory;
+import org.carbondata.core.metadata.ValueEncoderMeta;
+import org.carbondata.core.reader.CarbonFooterReader;
+import org.carbondata.format.FileFooter;
+
+/**
+ * Below class will be used to convert the thrift object of data file
+ * meta data to wrapper object
+ */
+class DataFileFooterConverter {
+
+  private static final LogService LOGGER =
+      LogServiceFactory.getLogService(DataFileFooterConverter.class.getName());
+
+  /**
+   * Below method will be used to convert thrift file meta to wrapper file meta
+   */
+  public DataFileFooter readDataFileFooter(String filePath, long blockOffset, 
long blockLength)
+      throws IOException {
+    DataFileFooter dataFileFooter = new DataFileFooter();
+    FileHolder fileReader = null;
+    try {
+      long completeBlockLength = blockOffset + blockLength;
+      long footerPointer = completeBlockLength - 8;
+      fileReader = 
FileFactory.getFileHolder(FileFactory.getFileType(filePath));
+      long actualFooterOffset = fileReader.readLong(filePath, footerPointer);
+      CarbonFooterReader reader = new CarbonFooterReader(filePath, 
actualFooterOffset);
+      FileFooter footer = reader.readFooter();
+      dataFileFooter.setVersionId(footer.getVersion());
+      dataFileFooter.setNumberOfRows(footer.getNum_rows());
+      dataFileFooter.setSegmentInfo(getSegmentInfo(footer.getSegment_info()));
+      List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
+      List<org.carbondata.format.ColumnSchema> table_columns = 
footer.getTable_columns();
+      for (int i = 0; i < table_columns.size(); i++) {
+        
columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
+      }
+      dataFileFooter.setColumnInTable(columnSchemaList);
+
+      List<org.carbondata.format.BlockletIndex> leaf_node_indices_Thrift =
+          footer.getBlocklet_index_list();
+      List<BlockletIndex> blockletIndexList = new ArrayList<BlockletIndex>();
+      for (int i = 0; i < leaf_node_indices_Thrift.size(); i++) {
+        BlockletIndex blockletIndex = 
getBlockletIndex(leaf_node_indices_Thrift.get(i));
+        blockletIndexList.add(blockletIndex);
+      }
+
+      List<org.carbondata.format.BlockletInfo> leaf_node_infos_Thrift =
+          footer.getBlocklet_info_list();
+      List<BlockletInfo> blockletInfoList = new ArrayList<BlockletInfo>();
+      for (int i = 0; i < leaf_node_infos_Thrift.size(); i++) {
+        BlockletInfo blockletInfo = 
getBlockletInfo(leaf_node_infos_Thrift.get(i));
+        blockletInfo.setBlockletIndex(blockletIndexList.get(i));
+        blockletInfoList.add(blockletInfo);
+      }
+      dataFileFooter.setBlockletList(blockletInfoList);
+      
dataFileFooter.setBlockletIndex(getBlockletIndexForDataFileFooter(blockletIndexList));
+    } finally {
+      if (null != fileReader) {
+        fileReader.finish();
+      }
+    }
+    return dataFileFooter;
+  }
+
+  /**
+   * Below method will be used to get blocklet index for data file meta
+   *
+   * @param blockletIndexList
+   * @return blocklet index
+   */
+  private BlockletIndex getBlockletIndexForDataFileFooter(List<BlockletIndex> 
blockletIndexList) {
+    BlockletIndex blockletIndex = new BlockletIndex();
+    BlockletBTreeIndex blockletBTreeIndex = new BlockletBTreeIndex();
+    
blockletBTreeIndex.setStartKey(blockletIndexList.get(0).getBtreeIndex().getStartKey());
+    blockletBTreeIndex
+        .setEndKey(blockletIndexList.get(blockletIndexList.size() - 
1).getBtreeIndex().getEndKey());
+    blockletIndex.setBtreeIndex(blockletBTreeIndex);
+    byte[][] currentMinValue = 
blockletIndexList.get(0).getMinMaxIndex().getMinValues().clone();
+    byte[][] currentMaxValue = 
blockletIndexList.get(0).getMinMaxIndex().getMaxValues().clone();
+    byte[][] minValue = null;
+    byte[][] maxValue = null;
+    for (int i = 1; i < blockletIndexList.size(); i++) {
+      minValue = blockletIndexList.get(i).getMinMaxIndex().getMinValues();
+      maxValue = blockletIndexList.get(i).getMinMaxIndex().getMaxValues();
+      for (int j = 0; j < maxValue.length; j++) {
+        if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(currentMinValue[j], 
minValue[j]) > 0) {
+          currentMinValue[j] = minValue[j].clone();
+        }
+        if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(currentMaxValue[j], 
maxValue[j]) < 0) {
+          currentMaxValue[j] = maxValue[j].clone();
+        }
+      }
+    }
+
+    BlockletMinMaxIndex minMax = new BlockletMinMaxIndex();
+    minMax.setMaxValues(currentMaxValue);
+    minMax.setMinValues(currentMinValue);
+    blockletIndex.setMinMaxIndex(minMax);
+    return blockletIndex;
+  }
+
+  private ColumnSchema thriftColumnSchmeaToWrapperColumnSchema(
+      org.carbondata.format.ColumnSchema externalColumnSchema) {
+    ColumnSchema wrapperColumnSchema = new ColumnSchema();
+    wrapperColumnSchema.setColumnUniqueId(externalColumnSchema.getColumn_id());
+    wrapperColumnSchema.setColumnName(externalColumnSchema.getColumn_name());
+    wrapperColumnSchema.setColumnar(externalColumnSchema.isColumnar());
+    wrapperColumnSchema
+        
.setDataType(thriftDataTyopeToWrapperDataType(externalColumnSchema.data_type));
+    wrapperColumnSchema.setDimensionColumn(externalColumnSchema.isDimension());
+    List<Encoding> encoders = new ArrayList<Encoding>();
+    for (org.carbondata.format.Encoding encoder : 
externalColumnSchema.getEncoders()) {
+      encoders.add(fromExternalToWrapperEncoding(encoder));
+    }
+    wrapperColumnSchema.setEncodingList(encoders);
+    wrapperColumnSchema.setNumberOfChild(externalColumnSchema.getNum_child());
+    wrapperColumnSchema.setPrecision(externalColumnSchema.getPrecision());
+    
wrapperColumnSchema.setColumnGroup(externalColumnSchema.getColumn_group_id());
+    wrapperColumnSchema.setScale(externalColumnSchema.getScale());
+    
wrapperColumnSchema.setDefaultValue(externalColumnSchema.getDefault_value());
+    
wrapperColumnSchema.setAggregateFunction(externalColumnSchema.getAggregate_function());
+    return wrapperColumnSchema;
+  }
+
+  /**
+   * Below method is to convert the blocklet info of the thrift to wrapper
+   * blocklet info
+   *
+   * @param blockletInfoThrift blocklet info of the thrift
+   * @return blocklet info wrapper
+   */
+  private BlockletInfo getBlockletInfo(org.carbondata.format.BlockletInfo 
blockletInfoThrift) {
+    BlockletInfo blockletInfo = new BlockletInfo();
+    List<DataChunk> dimensionColumnChunk = new ArrayList<DataChunk>();
+    List<DataChunk> measureChunk = new ArrayList<DataChunk>();
+    Iterator<org.carbondata.format.DataChunk> column_data_chunksIterator =
+        blockletInfoThrift.getColumn_data_chunksIterator();
+    while (column_data_chunksIterator.hasNext()) {
+      org.carbondata.format.DataChunk next = column_data_chunksIterator.next();
+      if (next.isRowMajor()) {
+        dimensionColumnChunk.add(getDataChunk(next, false));
+      } else if 
(next.getEncoders().contains(org.carbondata.format.Encoding.DELTA)) {
+        measureChunk.add(getDataChunk(next, true));
+      } else {
+
+        dimensionColumnChunk.add(getDataChunk(next, false));
+      }
+    }
+    blockletInfo.setDimensionColumnChunk(dimensionColumnChunk);
+    blockletInfo.setMeasureColumnChunk(measureChunk);
+    blockletInfo.setNumberOfRows(blockletInfoThrift.getNum_rows());
+    return blockletInfo;
+  }
+
+  /**
+   * Below method is convert the thrift encoding to wrapper encoding
+   *
+   * @param encoderThrift thrift encoding
+   * @return wrapper encoding
+   */
+  private Encoding 
fromExternalToWrapperEncoding(org.carbondata.format.Encoding encoderThrift) {
+    switch (encoderThrift) {
+      case DICTIONARY:
+        return Encoding.DICTIONARY;
+      case DELTA:
+        return Encoding.DELTA;
+      case RLE:
+        return Encoding.RLE;
+      case INVERTED_INDEX:
+        return Encoding.INVERTED_INDEX;
+      case BIT_PACKED:
+        return Encoding.BIT_PACKED;
+      case DIRECT_DICTIONARY:
+        return Encoding.DIRECT_DICTIONARY;
+      default:
+        return Encoding.DICTIONARY;
+    }
+  }
+
+  /**
+   * Below method will be used to convert the thrift compression to wrapper
+   * compression codec
+   *
+   * @param compressionCodecThrift
+   * @return wrapper compression codec
+   */
+  private CompressionCodec getCompressionCodec(
+      org.carbondata.format.CompressionCodec compressionCodecThrift) {
+    switch (compressionCodecThrift) {
+      case SNAPPY:
+        return CompressionCodec.SNAPPY;
+      default:
+        return CompressionCodec.SNAPPY;
+    }
+  }
+
+  /**
+   * Below method will be used to convert thrift segment object to wrapper
+   * segment object
+   *
+   * @param segmentInfo thrift segment info object
+   * @return wrapper segment info object
+   */
+  private SegmentInfo getSegmentInfo(org.carbondata.format.SegmentInfo 
segmentInfo) {
+    SegmentInfo info = new SegmentInfo();
+    int[] cardinality = new int[segmentInfo.getColumn_cardinalities().size()];
+    for (int i = 0; i < cardinality.length; i++) {
+      cardinality[i] = segmentInfo.getColumn_cardinalities().get(i);
+    }
+    info.setColumnCardinality(cardinality);
+    info.setNumberOfColumns(segmentInfo.getNum_cols());
+    return info;
+  }
+
+  /**
+   * Below method will be used to convert the blocklet index of thrift to
+   * wrapper
+   *
+   * @param blockletIndexThrift
+   * @return blocklet index wrapper
+   */
+  private BlockletIndex getBlockletIndex(org.carbondata.format.BlockletIndex 
blockletIndexThrift) {
+    org.carbondata.format.BlockletBTreeIndex btreeIndex = 
blockletIndexThrift.getB_tree_index();
+    org.carbondata.format.BlockletMinMaxIndex minMaxIndex = 
blockletIndexThrift.getMin_max_index();
+    return new BlockletIndex(
+        new BlockletBTreeIndex(btreeIndex.getStart_key(), 
btreeIndex.getEnd_key()),
+        new BlockletMinMaxIndex(minMaxIndex.getMin_values(), 
minMaxIndex.getMax_values()));
+  }
+
+  /**
+   * Below method will be used to convert the thrift compression meta to
+   * wrapper chunk compression meta
+   *
+   * @param chunkCompressionMetaThrift
+   * @return chunkCompressionMetaWrapper
+   */
+  private ChunkCompressorMeta getChunkCompressionMeta(
+      org.carbondata.format.ChunkCompressionMeta chunkCompressionMetaThrift) {
+    ChunkCompressorMeta compressorMeta = new ChunkCompressorMeta();
+    compressorMeta
+        
.setCompressor(getCompressionCodec(chunkCompressionMetaThrift.getCompression_codec()));
+    
compressorMeta.setCompressedSize(chunkCompressionMetaThrift.getTotal_compressed_size());
+    
compressorMeta.setUncompressedSize(chunkCompressionMetaThrift.getTotal_uncompressed_size());
+    return compressorMeta;
+  }
+
  /**
   * Below method will be used to convert the thrift data type to wrapper data
   * type.
   *
   * NOTE(review): method name typo "Tyope" is kept as-is for compatibility
   * with existing callers elsewhere in this class.
   *
   * @param dataTypeThrift thrift data type read from the footer
   * @return dataType wrapper; any unrecognized type falls back to STRING
   */
  private DataType thriftDataTyopeToWrapperDataType(
      org.carbondata.format.DataType dataTypeThrift) {
    switch (dataTypeThrift) {
      case STRING:
        return DataType.STRING;
      case INT:
        return DataType.INT;
      case LONG:
        return DataType.LONG;
      case DOUBLE:
        return DataType.DOUBLE;
      case DECIMAL:
        return DataType.DECIMAL;
      case TIMESTAMP:
        return DataType.TIMESTAMP;
      case ARRAY:
        return DataType.ARRAY;
      case STRUCT:
        return DataType.STRUCT;
      default:
        // silent fallback: unknown thrift types are treated as STRING
        return DataType.STRING;
    }
  }
+
+  /**
+   * Below method will be used to convert the thrift presence meta to wrapper
+   * presence meta
+   *
+   * @param presentMetadataThrift
+   * @return wrapper presence meta
+   */
+  private PresenceMeta getPresenceMeta(org.carbondata.format.PresenceMeta 
presentMetadataThrift) {
+    PresenceMeta presenceMeta = new PresenceMeta();
+    
presenceMeta.setRepresentNullValues(presentMetadataThrift.isRepresents_presence());
+    
presenceMeta.setBitSet(BitSet.valueOf(presentMetadataThrift.getPresent_bit_stream()));
+    return presenceMeta;
+  }
+
+  /**
+   * Below method will be used to convert the thrift object to wrapper object
+   *
+   * @param sortStateThrift
+   * @return wrapper sort state object
+   */
+  private SortState getSortState(org.carbondata.format.SortState 
sortStateThrift) {
+    if (sortStateThrift == org.carbondata.format.SortState.SORT_EXPLICIT) {
+      return SortState.SORT_EXPLICT;
+    } else if (sortStateThrift == org.carbondata.format.SortState.SORT_NATIVE) 
{
+      return SortState.SORT_NATIVE;
+    } else {
+      return SortState.SORT_NONE;
+    }
+  }
+
+  /**
+   * Below method will be used to convert the thrift data chunk to wrapper
+   * data chunk
+   *
+   * @param datachunkThrift
+   * @return wrapper data chunk
+   */
+  private DataChunk getDataChunk(org.carbondata.format.DataChunk 
datachunkThrift,
+      boolean isPresenceMetaPresent) {
+    DataChunk dataChunk = new DataChunk();
+    dataChunk.setColumnUniqueIdList(datachunkThrift.getColumn_ids());
+    dataChunk.setDataPageLength(datachunkThrift.getData_page_length());
+    dataChunk.setDataPageOffset(datachunkThrift.getData_page_offset());
+    if (isPresenceMetaPresent) {
+      
dataChunk.setNullValueIndexForColumn(getPresenceMeta(datachunkThrift.getPresence()));
+    }
+    dataChunk.setRlePageLength(datachunkThrift.getRle_page_length());
+    dataChunk.setRlePageOffset(datachunkThrift.getRle_page_offset());
+    dataChunk.setRowMajor(datachunkThrift.isRowMajor());
+    dataChunk.setRowIdPageLength(datachunkThrift.getRowid_page_length());
+    dataChunk.setRowIdPageOffset(datachunkThrift.getRowid_page_offset());
+    dataChunk.setSortState(getSortState(datachunkThrift.getSort_state()));
+    
dataChunk.setChunkCompressionMeta(getChunkCompressionMeta(datachunkThrift.getChunk_meta()));
+    List<Encoding> encodingList = new 
ArrayList<Encoding>(datachunkThrift.getEncoders().size());
+    for (int i = 0; i < datachunkThrift.getEncoders().size(); i++) {
+      
encodingList.add(fromExternalToWrapperEncoding(datachunkThrift.getEncoders().get(i)));
+    }
+    dataChunk.setEncoderList(encodingList);
+    if (encodingList.contains(Encoding.DELTA)) {
+      List<ByteBuffer> thriftEncoderMeta = datachunkThrift.getEncoder_meta();
+      List<ValueEncoderMeta> encodeMetaList =
+          new ArrayList<ValueEncoderMeta>(thriftEncoderMeta.size());
+      for (int i = 0; i < thriftEncoderMeta.size(); i++) {
+        
encodeMetaList.add(deserializeEncoderMeta(thriftEncoderMeta.get(i).array()));
+      }
+      dataChunk.setValueEncoderMeta(encodeMetaList);
+    }
+    return dataChunk;
+  }
+
+  /**
+   * Below method will be used to convert the encode metadata to
+   * ValueEncoderMeta object
+   *
+   * @param encoderMeta
+   * @return ValueEncoderMeta object
+   */
+  private ValueEncoderMeta deserializeEncoderMeta(byte[] encoderMeta) {
+    // TODO : should remove the unnecessary fields.
+    ByteArrayInputStream aos = null;
+    ObjectInputStream objStream = null;
+    ValueEncoderMeta meta = null;
+    try {
+      aos = new ByteArrayInputStream(encoderMeta);
+      objStream = new ObjectInputStream(aos);
+      meta = (ValueEncoderMeta) objStream.readObject();
+    } catch (ClassNotFoundException e) {
+      LOGGER.error(e);
+    } catch (IOException e) {
+      CarbonUtil.closeStreams(objStream);
+    }
+    return meta;
+  }
+}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/query/carbon/collector/ScannedResultCollector.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/carbondata/query/carbon/collector/ScannedResultCollector.java
 
b/core/src/main/java/org/carbondata/query/carbon/collector/ScannedResultCollector.java
deleted file mode 100644
index 9e5d401..0000000
--- 
a/core/src/main/java/org/carbondata/query/carbon/collector/ScannedResultCollector.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.query.carbon.collector;
-
-import org.carbondata.query.carbon.result.AbstractScannedResult;
-import org.carbondata.query.carbon.result.Result;
-
-/**
- * Interface which will be used to aggregate the scan result
- */
-public interface ScannedResultCollector {
-
-  /**
-   * Below method will be used to aggregate the scanned result
-   *
-   * @param scannedResult scanned result
-   * @return how many records was aggregated
-   */
-  int collectData(AbstractScannedResult scannedResult, int batchSize);
-
-  /**
-   * Below method will be used to get the aggregated result
-   *
-   * @return
-   */
-  Result getCollectedResult();
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/query/carbon/collector/impl/ListBasedResultCollector.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/carbondata/query/carbon/collector/impl/ListBasedResultCollector.java
 
b/core/src/main/java/org/carbondata/query/carbon/collector/impl/ListBasedResultCollector.java
deleted file mode 100644
index 30d33b8..0000000
--- 
a/core/src/main/java/org/carbondata/query/carbon/collector/impl/ListBasedResultCollector.java
+++ /dev/null
@@ -1,212 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.query.carbon.collector.impl;
-
-import java.util.ArrayList;
-import java.util.List;
-
-import org.carbondata.common.logging.LogService;
-import org.carbondata.common.logging.LogServiceFactory;
-import org.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
-import org.carbondata.core.carbon.metadata.datatype.DataType;
-import org.carbondata.core.keygenerator.KeyGenException;
-import org.carbondata.query.carbon.collector.ScannedResultCollector;
-import org.carbondata.query.carbon.executor.infos.BlockExecutionInfo;
-import org.carbondata.query.carbon.executor.infos.KeyStructureInfo;
-import org.carbondata.query.carbon.executor.util.QueryUtil;
-import org.carbondata.query.carbon.result.AbstractScannedResult;
-import org.carbondata.query.carbon.result.ListBasedResultWrapper;
-import org.carbondata.query.carbon.result.Result;
-import org.carbondata.query.carbon.result.impl.ListBasedResult;
-import org.carbondata.query.carbon.util.DataTypeUtil;
-import org.carbondata.query.carbon.wrappers.ByteArrayWrapper;
-
-/**
- * It is not a collector it is just a scanned result holder.
- *
- */
-public class ListBasedResultCollector implements ScannedResultCollector {
-
-  private static final LogService LOGGER =
-      
LogServiceFactory.getLogService(ListBasedResultCollector.class.getName());
-
-  /**
-   * to keep a track of number of row processed to handle limit push down in
-   * case of detail query scenario
-   */
-  private int rowCounter;
-
-  /**
-   * dimension values list
-   */
-  private List<ListBasedResultWrapper> listBasedResult;
-
-  /**
-   * restructuring info
-   */
-  private KeyStructureInfo restructureInfos;
-
-  /**
-   * table block execution infos
-   */
-  private BlockExecutionInfo tableBlockExecutionInfos;
-
-  private int[] measuresOrdinal;
-
-  /**
-   * to check whether measure exists in current table block or not this to
-   * handle restructuring scenario
-   */
-  private boolean[] isMeasureExistsInCurrentBlock;
-
-  /**
-   * default value of the measures in case of restructuring some measure wont
-   * be present in the table so in that default value will be used to
-   * aggregate the data for that measure columns
-   */
-  private Object[] measureDefaultValue;
-
-  /**
-   * measure datatypes.
-   */
-  private DataType[] measureDatatypes;
-
-  public ListBasedResultCollector(BlockExecutionInfo blockExecutionInfos) {
-    this.tableBlockExecutionInfos = blockExecutionInfos;
-    restructureInfos = blockExecutionInfos.getKeyStructureInfo();
-    measuresOrdinal = 
tableBlockExecutionInfos.getAggregatorInfo().getMeasureOrdinals();
-    isMeasureExistsInCurrentBlock = 
tableBlockExecutionInfos.getAggregatorInfo().getMeasureExists();
-    measureDefaultValue = 
tableBlockExecutionInfos.getAggregatorInfo().getDefaultValues();
-    this.measureDatatypes = 
tableBlockExecutionInfos.getAggregatorInfo().getMeasureDataTypes();
-  }
-
-  @Override
-  /**
-   * This method will add a record both key and value to list object
-   * it will keep track of how many record is processed, to handle limit 
scenario
-   * @param scanned result
-   *
-   */
-  public int collectData(AbstractScannedResult scannedResult, int batchSize) {
-    this.listBasedResult =
-        new ArrayList<>(batchSize);
-    boolean isMsrsPresent = measureDatatypes.length > 0;
-    ByteArrayWrapper wrapper = null;
-    // scan the record and add to list
-    ListBasedResultWrapper resultWrapper;
-    int rowCounter = 0;
-    while (scannedResult.hasNext() && rowCounter < batchSize) {
-      resultWrapper = new ListBasedResultWrapper();
-      if(tableBlockExecutionInfos.isDimensionsExistInQuery()) {
-        wrapper = new ByteArrayWrapper();
-        wrapper.setDictionaryKey(scannedResult.getDictionaryKeyArray());
-        wrapper.setNoDictionaryKeys(scannedResult.getNoDictionaryKeyArray());
-        wrapper.setComplexTypesKeys(scannedResult.getComplexTypeKeyArray());
-        resultWrapper.setKey(wrapper);
-      } else {
-        scannedResult.incrementCounter();
-      }
-      if(isMsrsPresent) {
-        Object[] msrValues = new Object[measureDatatypes.length];
-        fillMeasureData(msrValues, scannedResult);
-        resultWrapper.setValue(msrValues);
-      }
-      listBasedResult.add(resultWrapper);
-      rowCounter++;
-    }
-    return rowCounter;
-  }
-
-  private void fillMeasureData(Object[] msrValues, AbstractScannedResult 
scannedResult) {
-    for (short i = 0; i < measuresOrdinal.length; i++) {
-      // if measure exists is block then pass measure column
-      // data chunk to the collector
-      if (isMeasureExistsInCurrentBlock[i]) {
-        msrValues[i] =
-            getMeasureData(scannedResult.getMeasureChunk(measuresOrdinal[i]),
-                scannedResult.getCurrenrRowId(),measureDatatypes[i]);
-      } else {
-        // if not then get the default value and use that value in aggregation
-        msrValues[i] = measureDefaultValue[i];
-      }
-    }
-  }
-
-  private Object getMeasureData(MeasureColumnDataChunk dataChunk, int index, 
DataType dataType) {
-    if (!dataChunk.getNullValueIndexHolder().getBitSet().get(index)) {
-      Object msrVal;
-      switch (dataType) {
-        case LONG:
-          msrVal = 
dataChunk.getMeasureDataHolder().getReadableLongValueByIndex(index);
-          break;
-        case DECIMAL:
-          msrVal = 
dataChunk.getMeasureDataHolder().getReadableBigDecimalValueByIndex(index);
-          break;
-        default:
-          msrVal = 
dataChunk.getMeasureDataHolder().getReadableDoubleValueByIndex(index);
-      }
-      return DataTypeUtil.getMeasureDataBasedOnDataType(msrVal, dataType);
-    }
-    return null;
-  }
-
-  /**
-   * Below method will used to get the result
-   */
-  @Override public Result getCollectedResult() {
-    Result<List<ListBasedResultWrapper>, Object> result = new 
ListBasedResult();
-    if (!tableBlockExecutionInfos.isFixedKeyUpdateRequired()) {
-      updateKeyWithLatestBlockKeyGenerator();
-    }
-    result.addScannedResult(listBasedResult);
-    return result;
-  }
-
-
-
-  /**
-   * Below method will be used to update the fixed length key with the
-   * latest block key generator
-   *
-   * @return updated block
-   */
-  private void updateKeyWithLatestBlockKeyGenerator() {
-    try {
-      long[] data = null;
-      ByteArrayWrapper key = null;
-      for (int i = 0; i < listBasedResult.size(); i++) {
-        // get the key
-        key = listBasedResult.get(i).getKey();
-        // unpack the key with table block key generator
-        data = tableBlockExecutionInfos.getBlockKeyGenerator()
-            .getKeyArray(key.getDictionaryKey(), 
tableBlockExecutionInfos.getMaskedByteForBlock());
-        // packed the key with latest block key generator
-        // and generate the masked key for that key
-        key.setDictionaryKey(QueryUtil
-            .getMaskedKey(restructureInfos.getKeyGenerator().generateKey(data),
-                restructureInfos.getMaxKey(), 
restructureInfos.getMaskByteRanges(),
-                restructureInfos.getMaskByteRanges().length));
-        listBasedResult.get(i).setKey(key);
-      }
-    } catch (KeyGenException e) {
-      LOGGER.error(e);
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/query/carbon/executor/QueryExecutor.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/carbondata/query/carbon/executor/QueryExecutor.java 
b/core/src/main/java/org/carbondata/query/carbon/executor/QueryExecutor.java
deleted file mode 100644
index 74713ba..0000000
--- a/core/src/main/java/org/carbondata/query/carbon/executor/QueryExecutor.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.query.carbon.executor;
-
-import org.carbondata.core.iterator.CarbonIterator;
-import org.carbondata.query.carbon.executor.exception.QueryExecutionException;
-import org.carbondata.query.carbon.model.QueryModel;
-
-/**
- * Interface for carbon query executor.
- * Will be used to execute the query based on the query model
- * and will return the iterator over query result
- */
-public interface QueryExecutor<E> {
-
-  /**
-   * Below method will be used to execute the query based on query model 
passed from driver
-   *
-   * @param queryModel query details
-   * @return query result iterator
-   * @throws QueryExecutionException if any failure while executing the query
-   */
-  CarbonIterator<E> execute(QueryModel queryModel) throws 
QueryExecutionException;
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/query/carbon/executor/QueryExecutorFactory.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/carbondata/query/carbon/executor/QueryExecutorFactory.java
 
b/core/src/main/java/org/carbondata/query/carbon/executor/QueryExecutorFactory.java
deleted file mode 100644
index fd1cf40..0000000
--- 
a/core/src/main/java/org/carbondata/query/carbon/executor/QueryExecutorFactory.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.query.carbon.executor;
-
-import org.carbondata.query.carbon.executor.impl.DetailQueryExecutor;
-import org.carbondata.query.carbon.executor.impl.DetailRawRecordQueryExecutor;
-import org.carbondata.query.carbon.model.QueryModel;
-
-/**
- * Factory class to get the query executor from RDD
- * This will return the executor based on query type
- */
-public class QueryExecutorFactory {
-
-  public static QueryExecutor getQueryExecutor(QueryModel queryModel) {
-    if (queryModel.isForcedDetailRawQuery()) {
-      return new DetailRawRecordQueryExecutor();
-    } else {
-      return new DetailQueryExecutor();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/query/carbon/executor/exception/QueryExecutionException.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/carbondata/query/carbon/executor/exception/QueryExecutionException.java
 
b/core/src/main/java/org/carbondata/query/carbon/executor/exception/QueryExecutionException.java
deleted file mode 100644
index 836009b..0000000
--- 
a/core/src/main/java/org/carbondata/query/carbon/executor/exception/QueryExecutionException.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.query.carbon.executor.exception;
-
-import java.util.Locale;
-
-/**
- * Exception class for query execution
- *
- * @author Administrator
- */
-public class QueryExecutionException extends Exception {
-
-  /**
-   * default serial version ID.
-   */
-  private static final long serialVersionUID = 1L;
-
-  /**
-   * The Error message.
-   */
-  private String msg = "";
-
-  /**
-   * Constructor
-   *
-   * @param errorCode The error code for this exception.
-   * @param msg       The error message for this exception.
-   */
-  public QueryExecutionException(String msg) {
-    super(msg);
-    this.msg = msg;
-  }
-
-  /**
-   * Constructor
-   *
-   * @param errorCode The error code for this exception.
-   * @param msg       The error message for this exception.
-   */
-  public QueryExecutionException(String msg, Throwable t) {
-    super(msg, t);
-    this.msg = msg;
-  }
-
-  /**
-   * Constructor
-   *
-   * @param t
-   */
-  public QueryExecutionException(Throwable t) {
-    super(t);
-  }
-
-  /**
-   * This method is used to get the localized message.
-   *
-   * @param locale - A Locale object represents a specific geographical,
-   *               political, or cultural region.
-   * @return - Localized error message.
-   */
-  public String getLocalizedMessage(Locale locale) {
-    return "";
-  }
-
-  /**
-   * getLocalizedMessage
-   */
-  @Override public String getLocalizedMessage() {
-    return super.getLocalizedMessage();
-  }
-
-  /**
-   * getMessage
-   */
-  public String getMessage() {
-    return this.msg;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/query/carbon/executor/impl/AbstractQueryExecutor.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/carbondata/query/carbon/executor/impl/AbstractQueryExecutor.java
 
b/core/src/main/java/org/carbondata/query/carbon/executor/impl/AbstractQueryExecutor.java
deleted file mode 100644
index 37c4b68..0000000
--- 
a/core/src/main/java/org/carbondata/query/carbon/executor/impl/AbstractQueryExecutor.java
+++ /dev/null
@@ -1,403 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.carbondata.query.carbon.executor.impl;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.carbondata.common.logging.LogService;
import org.carbondata.common.logging.LogServiceFactory;
import org.carbondata.common.logging.impl.StandardLogService;
import org.carbondata.core.carbon.datastore.BlockIndexStore;
import org.carbondata.core.carbon.datastore.IndexKey;
import org.carbondata.core.carbon.datastore.block.AbstractIndex;
import org.carbondata.core.carbon.datastore.block.SegmentProperties;
import org.carbondata.core.carbon.datastore.exception.IndexBuilderException;
import org.carbondata.core.carbon.metadata.datatype.DataType;
import org.carbondata.core.carbon.metadata.encoder.Encoding;
import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension;
import org.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure;
import org.carbondata.core.constants.CarbonCommonConstants;
import org.carbondata.core.datastorage.store.impl.FileFactory;
import org.carbondata.core.keygenerator.KeyGenException;
import org.carbondata.core.keygenerator.KeyGenerator;
import org.carbondata.core.util.CarbonUtil;
import org.carbondata.query.carbon.executor.QueryExecutor;
import org.carbondata.query.carbon.executor.exception.QueryExecutionException;
import org.carbondata.query.carbon.executor.infos.AggregatorInfo;
import org.carbondata.query.carbon.executor.infos.BlockExecutionInfo;
import org.carbondata.query.carbon.executor.infos.KeyStructureInfo;
import org.carbondata.query.carbon.executor.infos.SortInfo;
import org.carbondata.query.carbon.executor.util.QueryUtil;
import org.carbondata.query.carbon.executor.util.RestructureUtil;
import org.carbondata.query.carbon.model.QueryDimension;
import org.carbondata.query.carbon.model.QueryMeasure;
import org.carbondata.query.carbon.model.QueryModel;
import org.carbondata.query.filters.measurefilter.util.FilterUtil;

import org.apache.commons.lang3.ArrayUtils;

/**
 * This class provides a skeletal implementation of the {@link QueryExecutor}
 * interface to minimize the effort required to implement this interface. This
 * will be used to prepare all the properties required for query execution
 */
public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {

  private static final LogService LOGGER =
      LogServiceFactory.getLogService(AbstractQueryExecutor.class.getName());
  /**
   * holder for query properties which will be used to execute the query
   */
  protected QueryExecutorProperties queryProperties;

  public AbstractQueryExecutor() {
    queryProperties = new QueryExecutorProperties();
  }

  /**
   * Below method will be used to fill the executor properties based on query
   * model: it will parse the query model, load the table blocks, and derive
   * key-structure, dictionary and measure metadata into
   * {@link #queryProperties}.
   *
   * @param queryModel query model to initialize from
   * @throws QueryExecutionException if the table blocks cannot be loaded
   */
  protected void initQuery(QueryModel queryModel) throws QueryExecutionException {
    StandardLogService.setThreadName(StandardLogService.getPartitionID(
        queryModel.getAbsoluteTableIdentifier().getCarbonTableIdentifier().getTableName()),
        queryModel.getQueryId());
    LOGGER.info("Query will be executed on table: " + queryModel.getAbsoluteTableIdentifier()
        .getCarbonTableIdentifier().getTableName());

    QueryUtil.resolveQueryModel(queryModel);

    // get the table blocks; index build failures are wrapped as query
    // execution failures so callers handle a single exception type
    try {
      queryProperties.dataBlocks = BlockIndexStore.getInstance()
          .loadAndGetBlocks(queryModel.getTableBlockInfos(),
              queryModel.getAbsoluteTableIdentifier());
    } catch (IndexBuilderException e) {
      throw new QueryExecutionException(e);
    }
    // updating the restructuring infos for the query, based on the key
    // generator of the last loaded block
    queryProperties.keyStructureInfo = getKeyStructureInfo(queryModel,
        queryProperties.dataBlocks.get(queryProperties.dataBlocks.size() - 1).getSegmentProperties()
            .getDimensionKeyGenerator());

    // calculating the total number of aggregated columns
    int aggTypeCount = queryModel.getQueryMeasures().size();

    int currentIndex = 0;
    String[] aggTypes = new String[aggTypeCount];
    DataType[] dataTypes = new DataType[aggTypeCount];

    for (QueryMeasure carbonMeasure : queryModel.getQueryMeasures()) {
      // adding the data type and aggregation type of all the measures; this
      // can be used to select the aggregator
      aggTypes[currentIndex] = carbonMeasure.getAggregateFunction();
      dataTypes[currentIndex] = carbonMeasure.getMeasure().getDataType();
      currentIndex++;
    }
    queryProperties.measureDataTypes = dataTypes;
    // as aggregation will be executed in following order
    // 1.aggregate dimension expression
    // 2. expression
    // 3. query measure
    // so calculating the index of the expression start index
    // and measure column start index
    queryProperties.aggExpressionStartIndex = queryModel.getQueryMeasures().size();
    queryProperties.measureStartIndex = aggTypes.length - queryModel.getQueryMeasures().size();

    // dictionary column unique column id to dictionary mapping
    // which will be used to get column actual data
    queryProperties.columnToDictionayMapping = QueryUtil
        .getDimensionDictionaryDetail(queryModel.getQueryDimension(),
            queryModel.getAbsoluteTableIdentifier());
    queryModel.setColumnToDictionaryMapping(queryProperties.columnToDictionayMapping);
    // setting the sort dimension index. as it will be updated while getting the sort info
    // so currently setting it to default 0 means sort is not present in any dimension
    queryProperties.sortDimIndexes = new byte[queryModel.getQueryDimension().size()];
  }

  /**
   * Below method will be used to get the key structure info for the query:
   * masked byte ranges, masked bytes and the max key for the dictionary
   * dimensions selected in the query.
   *
   * @param queryModel   query model
   * @param keyGenerator key generator of the reference block
   * @return key structure info
   */
  private KeyStructureInfo getKeyStructureInfo(QueryModel queryModel, KeyGenerator keyGenerator) {
    // getting the masked byte range for dictionary column
    int[] maskByteRanges =
        QueryUtil.getMaskedByteRange(queryModel.getQueryDimension(), keyGenerator);

    // getting the masked bytes for query dimension dictionary column
    int[] maskedBytes = QueryUtil.getMaskedByte(keyGenerator.getKeySizeInBytes(), maskByteRanges);

    // max key for the dictionary dimension present in the query
    byte[] maxKey = null;
    try {
      // getting the max key which will be used to masked and get the
      // masked key
      maxKey = QueryUtil.getMaxKeyBasedOnDimensions(queryModel.getQueryDimension(), keyGenerator);
    } catch (KeyGenException e) {
      // NOTE(review): on failure maxKey stays null and execution continues;
      // downstream consumers presumably tolerate a null max key — TODO confirm
      LOGGER.error(e, "problem while getting the max key");
    }

    KeyStructureInfo restructureInfos = new KeyStructureInfo();
    restructureInfos.setKeyGenerator(keyGenerator);
    restructureInfos.setMaskByteRanges(maskByteRanges);
    restructureInfos.setMaskedBytes(maskedBytes);
    restructureInfos.setMaxKey(maxKey);
    return restructureInfos;
  }

  /**
   * Prepares one {@link BlockExecutionInfo} per loaded data block; the query
   * will be executed based on those infos.
   *
   * @param queryModel query model
   * @return execution info for every selected block, in block order
   * @throws QueryExecutionException if initialization or per-block info
   *                                 creation fails
   */
  protected List<BlockExecutionInfo> getBlockExecutionInfos(QueryModel queryModel)
      throws QueryExecutionException {
    initQuery(queryModel);
    List<BlockExecutionInfo> blockExecutionInfoList = new ArrayList<BlockExecutionInfo>();
    // fill all the block execution infos for all the blocks selected in
    // query
    // and query will be executed based on that infos
    for (int i = 0; i < queryProperties.dataBlocks.size(); i++) {
      blockExecutionInfoList
          .add(getBlockExecutionInfoForBlock(queryModel, queryProperties.dataBlocks.get(i)));
    }
    return blockExecutionInfoList;
  }

  /**
   * Below method will be used to get the block execution info which is
   * required to execute any block  based on query model
   *
   * @param queryModel query model from user query
   * @param blockIndex block index
   * @return block execution info
   * @throws QueryExecutionException any failure during block info creation
   */
  protected BlockExecutionInfo getBlockExecutionInfoForBlock(QueryModel queryModel,
      AbstractIndex blockIndex) throws QueryExecutionException {
    BlockExecutionInfo blockExecutionInfo = new BlockExecutionInfo();
    SegmentProperties segmentProperties = blockIndex.getSegmentProperties();
    List<CarbonDimension> tableBlockDimensions = segmentProperties.getDimensions();
    KeyGenerator blockKeyGenerator = segmentProperties.getDimensionKeyGenerator();

    // below is to get only those dimension in query which is present in the
    // table block
    List<QueryDimension> updatedQueryDimension = RestructureUtil
        .getUpdatedQueryDimension(queryModel.getQueryDimension(), tableBlockDimensions);
    // TODO add complex dimension children
    int[] maskByteRangesForBlock =
        QueryUtil.getMaskedByteRange(updatedQueryDimension, blockKeyGenerator);
    int[] maksedByte =
        QueryUtil.getMaskedByte(blockKeyGenerator.getKeySizeInBytes(), maskByteRangesForBlock);
    blockExecutionInfo.setDimensionsExistInQuery(updatedQueryDimension.size() > 0);
    blockExecutionInfo.setDataBlock(blockIndex);
    blockExecutionInfo.setBlockKeyGenerator(blockKeyGenerator);
    // adding aggregation info for query
    blockExecutionInfo.setAggregatorInfo(getAggregatorInfoForBlock(queryModel, blockIndex));

    // setting the limit
    blockExecutionInfo.setLimit(queryModel.getLimit());
    // setting whether detail query or not
    blockExecutionInfo.setDetailQuery(queryModel.isDetailQuery());
    // setting whether raw record query or not
    blockExecutionInfo.setRawRecordDetailQuery(queryModel.isForcedDetailRawQuery());
    // setting the masked byte of the block which will be
    // used to update the unpack the older block keys
    blockExecutionInfo.setMaskedByteForBlock(maksedByte);
    // total number of dimension and measure blocks in this segment
    blockExecutionInfo
        .setTotalNumberDimensionBlock(segmentProperties.getDimensionOrdinalToBlockMapping().size());
    blockExecutionInfo
        .setTotalNumberOfMeasureBlock(segmentProperties.getMeasuresOrdinalToBlockMapping().size());
    // to check whether older block key update is required or not
    blockExecutionInfo.setFixedKeyUpdateRequired(
        blockKeyGenerator.equals(queryProperties.keyStructureInfo.getKeyGenerator()));
    IndexKey startIndexKey = null;
    IndexKey endIndexKey = null;
    if (null != queryModel.getFilterExpressionResolverTree()) {
      // loading the filter executer tree for filter evaluation
      blockExecutionInfo.setFilterExecuterTree(FilterUtil
          .getFilterExecuterTree(queryModel.getFilterExpressionResolverTree(), segmentProperties));
      List<IndexKey> listOfStartEndKeys = new ArrayList<IndexKey>(2);
      FilterUtil.traverseResolverTreeAndGetStartAndEndKey(segmentProperties,
          queryModel.getAbsoluteTableIdentifier(), queryModel.getFilterExpressionResolverTree(),
          listOfStartEndKeys);
      startIndexKey = listOfStartEndKeys.get(0);
      endIndexKey = listOfStartEndKeys.get(1);
    } else {
      // no filter: scan the whole block using default start/end keys
      try {
        startIndexKey = FilterUtil.prepareDefaultStartIndexKey(segmentProperties);
        endIndexKey = FilterUtil.prepareDefaultEndIndexKey(segmentProperties);
      } catch (KeyGenException e) {
        throw new QueryExecutionException(e);
      }
    }
    blockExecutionInfo.setFileType(
        FileFactory.getFileType(queryModel.getAbsoluteTableIdentifier().getStorePath()));
    //setting the start index key of the block node
    blockExecutionInfo.setStartKey(startIndexKey);
    //setting the end index key of the block node
    blockExecutionInfo.setEndKey(endIndexKey);
    // expression dimensions
    List<CarbonDimension> expressionDimensions =
        new ArrayList<CarbonDimension>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    // expression measure
    List<CarbonMeasure> expressionMeasures =
        new ArrayList<CarbonMeasure>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    // setting all the dimension chunk indexes to be read from file
    blockExecutionInfo.setAllSelectedDimensionBlocksIndexes(QueryUtil
        .getDimensionsBlockIndexes(updatedQueryDimension,
            segmentProperties.getDimensionOrdinalToBlockMapping(), expressionDimensions));
    // setting all the measure chunk indexes to be read from file
    blockExecutionInfo.setAllSelectedMeasureBlocksIndexes(QueryUtil
        .getMeasureBlockIndexes(queryModel.getQueryMeasures(), expressionMeasures,
            segmentProperties.getMeasuresOrdinalToBlockMapping()));
    // setting the key structure info which will be required
    // to update the older block key with new key generator
    blockExecutionInfo.setKeyStructureInfo(queryProperties.keyStructureInfo);
    // setting the size of fixed key column (dictionary column)
    blockExecutionInfo.setFixedLengthKeySize(getKeySize(updatedQueryDimension, segmentProperties));
    Set<Integer> dictionaryColumnBlockIndex = new HashSet<Integer>();
    List<Integer> noDictionaryColumnBlockIndex = new ArrayList<Integer>();
    // get the block index to be read from file for query dimension
    // for both dictionary columns and no dictionary columns
    QueryUtil.fillQueryDimensionsBlockIndexes(updatedQueryDimension,
        segmentProperties.getDimensionOrdinalToBlockMapping(), dictionaryColumnBlockIndex,
        noDictionaryColumnBlockIndex);
    int[] queryDictionaruColumnBlockIndexes = ArrayUtils.toPrimitive(
        dictionaryColumnBlockIndex.toArray(new Integer[dictionaryColumnBlockIndex.size()]));
    // need to sort the dictionary column as for all dimension
    // column key will be filled based on key order
    Arrays.sort(queryDictionaruColumnBlockIndexes);
    blockExecutionInfo.setDictionaryColumnBlockIndex(queryDictionaruColumnBlockIndexes);
    // setting the no dictionary column block indexes
    blockExecutionInfo.setNoDictionaryBlockIndexes(ArrayUtils.toPrimitive(
        noDictionaryColumnBlockIndex.toArray(new Integer[noDictionaryColumnBlockIndex.size()])));
    // setting column id to dictionary mapping
    blockExecutionInfo.setColumnIdToDcitionaryMapping(queryProperties.columnToDictionayMapping);
    // setting each column value size
    blockExecutionInfo.setEachColumnValueSize(segmentProperties.getEachDimColumnValueSize());
    try {
      // to set column group and its key structure info which will be used
      // to
      // for getting the column group column data in case of final row
      // and in case of dimension aggregation
      blockExecutionInfo.setColumnGroupToKeyStructureInfo(
          QueryUtil.getColumnGroupKeyStructureInfo(updatedQueryDimension, segmentProperties));
    } catch (KeyGenException e) {
      throw new QueryExecutionException(e);
    }
    return blockExecutionInfo;
  }

  /**
   * This method will be used to get fixed key length size this will be used
   * to create a row from column chunk
   *
   * @param queryDimension    query dimension
   * @param blockMetadataInfo block metadata info
   * @return key size
   */
  private int getKeySize(List<QueryDimension> queryDimension, SegmentProperties blockMetadataInfo) {
    List<Integer> fixedLengthDimensionOrdinal =
        new ArrayList<Integer>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
    int counter = 0;
    while (counter < queryDimension.size()) {
      if (queryDimension.get(counter).getDimension().numberOfChild() > 0) {
        // complex dimension: skip over all its children in one step
        counter += queryDimension.get(counter).getDimension().numberOfChild();
        continue;
      } else if (!CarbonUtil.hasEncoding(queryDimension.get(counter).getDimension().getEncoder(),
          Encoding.DICTIONARY)) {
        // no-dictionary dimension does not contribute to the fixed key size
        counter++;
      } else {
        fixedLengthDimensionOrdinal.add(queryDimension.get(counter).getDimension().getKeyOrdinal());
        counter++;
      }
    }
    int[] dictioanryColumnOrdinal = ArrayUtils.toPrimitive(
        fixedLengthDimensionOrdinal.toArray(new Integer[fixedLengthDimensionOrdinal.size()]));
    if (dictioanryColumnOrdinal.length > 0) {
      return blockMetadataInfo.getFixedLengthKeySplitter()
          .getKeySizeByBlock(dictioanryColumnOrdinal);
    }
    return 0;
  }

  /**
   * Below method will be used to get the sort information which will be
   * required during sorting the data on dimension column
   *
   * @param queryModel query model
   * @return sort infos
   * @throws QueryExecutionException if there is a problem while preparing
   *                                 the sort information
   */
  protected SortInfo getSortInfos(QueryModel queryModel) throws QueryExecutionException {

    // get the masked byte range for order by dimension
    int[][] maskedByteRangeForSorting = QueryUtil
        .getMaskedByteRangeForSorting(queryModel.getSortDimension(),
            queryProperties.keyStructureInfo.getKeyGenerator(),
            queryProperties.keyStructureInfo.getMaskByteRanges());
    // get masked key for sorting
    byte[][] maksedKeyForSorting = QueryUtil.getMaksedKeyForSorting(queryModel.getSortDimension(),
        queryProperties.keyStructureInfo.getKeyGenerator(), maskedByteRangeForSorting,
        queryProperties.keyStructureInfo.getMaskByteRanges());
    // fill sort dimension indexes
    queryProperties.sortDimIndexes = QueryUtil
        .getSortDimensionIndexes(queryModel.getSortDimension(), queryModel.getQueryDimension());
    SortInfo sortInfos = new SortInfo();
    sortInfos.setDimensionMaskKeyForSorting(maksedKeyForSorting);
    sortInfos.setDimensionSortOrder(queryModel.getSortOrder());
    sortInfos.setMaskedByteRangeForSorting(maskedByteRangeForSorting);
    sortInfos.setSortDimensionIndex(queryProperties.sortDimIndexes);
    sortInfos.setSortDimension(queryModel.getSortDimension());
    return sortInfos;
  }

  /**
   * Below method will be used to get the aggregator info for the query
   *
   * @param queryModel query model
   * @param tableBlock table block
   * @return aggregator info
   */
  private AggregatorInfo getAggregatorInfoForBlock(QueryModel queryModel,
      AbstractIndex tableBlock) {
    // getting the aggregate infos which will be used during aggregation
    AggregatorInfo aggregatorInfos = RestructureUtil
        .getAggregatorInfos(queryModel.getQueryMeasures(),
            tableBlock.getSegmentProperties().getMeasures());
    // setting the index of expression in measure aggregators
    aggregatorInfos.setExpressionAggregatorStartIndex(queryProperties.aggExpressionStartIndex);
    // setting the index of measure columns in measure aggregators
    aggregatorInfos.setMeasureAggregatorStartIndex(queryProperties.measureStartIndex);
    // setting the measure aggregator for all aggregation function selected
    // in query
    aggregatorInfos.setMeasureDataTypes(queryProperties.measureDataTypes);
    return aggregatorInfos;
  }

}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/query/carbon/executor/impl/DetailQueryExecutor.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/carbondata/query/carbon/executor/impl/DetailQueryExecutor.java
 
b/core/src/main/java/org/carbondata/query/carbon/executor/impl/DetailQueryExecutor.java
deleted file mode 100644
index 0255cbb..0000000
--- 
a/core/src/main/java/org/carbondata/query/carbon/executor/impl/DetailQueryExecutor.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.carbondata.query.carbon.executor.impl;
-
-import java.util.List;
-
-import org.carbondata.core.iterator.CarbonIterator;
-import org.carbondata.query.carbon.executor.exception.QueryExecutionException;
-import org.carbondata.query.carbon.executor.infos.BlockExecutionInfo;
-import org.carbondata.query.carbon.model.QueryModel;
-import org.carbondata.query.carbon.result.iterator.ChunkRowIterator;
-import org.carbondata.query.carbon.result.iterator.DetailQueryResultIterator;
-import 
org.carbondata.query.carbon.result.preparator.impl.DetailQueryResultPreparatorImpl;
-
-/**
- * Below class will be used to execute the detail query
- * For executing the detail query it will pass all the block execution
- * info to detail query result iterator and iterator will be returned
- */
-public class DetailQueryExecutor extends AbstractQueryExecutor {
-
-  @Override public CarbonIterator<Object[]> execute(QueryModel queryModel)
-      throws QueryExecutionException {
-    List<BlockExecutionInfo> blockExecutionInfoList = 
getBlockExecutionInfos(queryModel);
-    return new ChunkRowIterator(
-        new DetailQueryResultIterator(blockExecutionInfoList, queryModel,
-            new DetailQueryResultPreparatorImpl(queryProperties, queryModel)));
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/query/carbon/executor/impl/DetailRawRecordQueryExecutor.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/carbondata/query/carbon/executor/impl/DetailRawRecordQueryExecutor.java
 
b/core/src/main/java/org/carbondata/query/carbon/executor/impl/DetailRawRecordQueryExecutor.java
deleted file mode 100644
index e72c638..0000000
--- 
a/core/src/main/java/org/carbondata/query/carbon/executor/impl/DetailRawRecordQueryExecutor.java
+++ /dev/null
@@ -1,24 +0,0 @@
-package org.carbondata.query.carbon.executor.impl;
-
-import java.util.List;
-
-import org.carbondata.core.iterator.CarbonIterator;
-import org.carbondata.query.carbon.executor.exception.QueryExecutionException;
-import org.carbondata.query.carbon.executor.infos.BlockExecutionInfo;
-import org.carbondata.query.carbon.model.QueryModel;
-import org.carbondata.query.carbon.result.BatchResult;
-import org.carbondata.query.carbon.result.iterator.DetailQueryResultIterator;
-import 
org.carbondata.query.carbon.result.preparator.impl.RawQueryResultPreparatorImpl;
-
-/**
- * Executor for raw records, it does not parse to actual data
- */
-public class DetailRawRecordQueryExecutor extends 
AbstractQueryExecutor<BatchResult> {
-
-  @Override public CarbonIterator<BatchResult> execute(QueryModel queryModel)
-      throws QueryExecutionException {
-    List<BlockExecutionInfo> blockExecutionInfoList = 
getBlockExecutionInfos(queryModel);
-    return new DetailQueryResultIterator(blockExecutionInfoList, queryModel,
-        new RawQueryResultPreparatorImpl(queryProperties, queryModel));
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/1c725f5b/core/src/main/java/org/carbondata/query/carbon/executor/impl/QueryExecutorProperties.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/carbondata/query/carbon/executor/impl/QueryExecutorProperties.java
 
b/core/src/main/java/org/carbondata/query/carbon/executor/impl/QueryExecutorProperties.java
deleted file mode 100644
index ff42fbc..0000000
--- 
a/core/src/main/java/org/carbondata/query/carbon/executor/impl/QueryExecutorProperties.java
+++ /dev/null
@@ -1,78 +0,0 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.carbondata.query.carbon.executor.impl;

import java.util.List;
import java.util.Map;

import org.carbondata.core.cache.dictionary.Dictionary;
import org.carbondata.core.carbon.datastore.block.AbstractIndex;
import org.carbondata.core.carbon.metadata.datatype.DataType;
import org.carbondata.query.carbon.executor.infos.KeyStructureInfo;

/**
 * Holds all the properties required for query execution. This is a plain
 * mutable holder populated by {@code AbstractQueryExecutor.initQuery} and
 * shared with the result preparators; it is not thread-safe.
 */
public class QueryExecutorProperties {

  /**
   * list of blocks in which query will be executed
   */
  protected List<AbstractIndex> dataBlocks;

  /**
   * holds the information required for updating the older block
   * dictionary key (key generator, masked bytes/ranges and max key)
   */
  public KeyStructureInfo keyStructureInfo;

  /**
   * as we have multiple type of column aggregation like
   * dimension,expression,measure so this will be used to for getting the
   * measure aggregation start index
   */
  public int measureStartIndex;

  /**
   * query like count(1),count(*) ,etc will used this parameter
   */
  public boolean isFunctionQuery;

  /**
   * start index of the expression aggregators, which come before the
   * measure aggregators in the aggregation order
   */
  public int aggExpressionStartIndex;

  /**
   * index of the dimension which is present in the order by
   * in a query
   */
  public byte[] sortDimIndexes;

  /**
   * mapping from dictionary column unique id to its loaded {@link Dictionary},
   * used to translate dictionary surrogate keys back to actual column data.
   * NOTE(review): field name has a typo ("Dictionay") but is kept as-is
   * because it is accessed directly by other classes.
   */
  public Map<String, Dictionary> columnToDictionayMapping;

  /**
   * data types of the measures selected in the query, in measure order
   */
  public DataType[] measureDataTypes;
}

Reply via email to