http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/FileFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/FileFactory.java b/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/FileFactory.java
deleted file mode 100644
index f0c424b..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/FileFactory.java
+++ /dev/null
@@ -1,485 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.datastorage.store.impl;
-
-import java.io.BufferedInputStream;
-import java.io.BufferedOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
-import java.util.zip.GZIPInputStream;
-
-import org.apache.carbondata.core.datastorage.store.FileHolder;
-import org.apache.carbondata.core.datastorage.store.filesystem.AlluxioCarbonFile;
-import org.apache.carbondata.core.datastorage.store.filesystem.CarbonFile;
-import org.apache.carbondata.core.datastorage.store.filesystem.HDFSCarbonFile;
-import org.apache.carbondata.core.datastorage.store.filesystem.LocalCarbonFile;
-import org.apache.carbondata.core.datastorage.store.filesystem.ViewFSCarbonFile;
-import org.apache.carbondata.core.util.CarbonUtil;
-
-import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream;
-import org.apache.commons.io.FileUtils;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.compress.BZip2Codec;
-import org.apache.hadoop.io.compress.GzipCodec;
-
-public final class FileFactory {
-  private static Configuration configuration = null;
-
-  static {
-    configuration = new Configuration();
-    configuration.addResource(new Path("../core-default.xml"));
-  }
-
-  private FileFactory() {
-
-  }
-
-  public static Configuration getConfiguration() {
-    return configuration;
-  }
-
-  public static FileHolder getFileHolder(FileType fileType) {
-    switch (fileType) {
-      case LOCAL:
-        return new FileHolderImpl();
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        return new DFSFileHolderImpl();
-      default:
-        return new FileHolderImpl();
-    }
-  }
-
-  public static FileType getFileType(String path) {
-    if (path.startsWith(CarbonUtil.HDFS_PREFIX)) {
-      return FileType.HDFS;
-    }
-    else if (path.startsWith(CarbonUtil.ALLUXIO_PREFIX)) {
-      return FileType.ALLUXIO;
-    }
-    else if (path.startsWith(CarbonUtil.VIEWFS_PREFIX)) {
-      return FileType.VIEWFS;
-    }
-    return FileType.LOCAL;
-  }
-
-  public static CarbonFile getCarbonFile(String path, FileType fileType) {
-    switch (fileType) {
-      case LOCAL:
-        return new LocalCarbonFile(path);
-      case HDFS:
-        return new HDFSCarbonFile(path);
-      case ALLUXIO:
-        return new AlluxioCarbonFile(path);
-      case VIEWFS:
-        return new ViewFSCarbonFile(path);
-      default:
-        return new LocalCarbonFile(path);
-    }
-  }
-
-  public static DataInputStream getDataInputStream(String path, FileType fileType)
-      throws IOException {
-    return getDataInputStream(path, fileType, -1);
-  }
-
-  public static DataInputStream getDataInputStream(String path, FileType fileType, int bufferSize)
-      throws IOException {
-    path = path.replace("\\", "/");
-    boolean gzip = path.endsWith(".gz");
-    boolean bzip2 = path.endsWith(".bz2");
-    InputStream stream;
-    switch (fileType) {
-      case LOCAL:
-        if (gzip) {
-          stream = new GZIPInputStream(new FileInputStream(path));
-        } else if (bzip2) {
-          stream = new BZip2CompressorInputStream(new FileInputStream(path));
-        } else {
-          stream = new FileInputStream(path);
-        }
-        break;
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        Path pt = new Path(path);
-        FileSystem fs = pt.getFileSystem(configuration);
-        if (bufferSize == -1) {
-          stream = fs.open(pt);
-        } else {
-          stream = fs.open(pt, bufferSize);
-        }
-        if (gzip) {
-          GzipCodec codec = new GzipCodec();
-          stream = codec.createInputStream(stream);
-        } else if (bzip2) {
-          BZip2Codec codec = new BZip2Codec();
-          stream = codec.createInputStream(stream);
-        }
-        break;
-      default:
-        throw new UnsupportedOperationException("unsupported file system");
-    }
-    return new DataInputStream(new BufferedInputStream(stream));
-  }
-
-  /**
-   * Returns a DataInputStream that is seeked to the given offset of the file.
-   *
-   * @param path
-   * @param fileType
-   * @param bufferSize
-   * @param offset
-   * @return DataInputStream
-   * @throws IOException
-   */
-  public static DataInputStream getDataInputStream(String path, FileType fileType, int bufferSize,
-      long offset) throws IOException {
-    path = path.replace("\\", "/");
-    switch (fileType) {
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        Path pt = new Path(path);
-        FileSystem fs = pt.getFileSystem(configuration);
-        FSDataInputStream stream = fs.open(pt, bufferSize);
-        stream.seek(offset);
-        return new DataInputStream(new BufferedInputStream(stream));
-      default:
-        FileInputStream fis = new FileInputStream(path);
-        long actualSkipSize = 0;
-        long skipSize = offset;
-        while (actualSkipSize != offset) {
-          actualSkipSize += fis.skip(skipSize);
-          skipSize = offset - actualSkipSize;
-        }
-        return new DataInputStream(new BufferedInputStream(fis));
-    }
-  }
-
-  public static DataOutputStream getDataOutputStream(String path, FileType fileType)
-      throws IOException {
-    path = path.replace("\\", "/");
-    switch (fileType) {
-      case LOCAL:
-        return new DataOutputStream(new BufferedOutputStream(new FileOutputStream(path)));
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        Path pt = new Path(path);
-        FileSystem fs = pt.getFileSystem(configuration);
-        FSDataOutputStream stream = fs.create(pt, true);
-        return stream;
-      default:
-        return new DataOutputStream(new BufferedOutputStream(new FileOutputStream(path)));
-    }
-  }
-
-  public static DataOutputStream getDataOutputStream(String path, FileType fileType, int bufferSize,
-      boolean append) throws IOException {
-    path = path.replace("\\", "/");
-    switch (fileType) {
-      case LOCAL:
-        return new DataOutputStream(
-            new BufferedOutputStream(new FileOutputStream(path, append), bufferSize));
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        Path pt = new Path(path);
-        FileSystem fs = pt.getFileSystem(configuration);
-        FSDataOutputStream stream = null;
-        if (append) {
-          // append to a file only if file already exists else file not found
-          // exception will be thrown by hdfs
-          if (CarbonUtil.isFileExists(path)) {
-            stream = fs.append(pt, bufferSize);
-          } else {
-            stream = fs.create(pt, true, bufferSize);
-          }
-        } else {
-          stream = fs.create(pt, true, bufferSize);
-        }
-        return stream;
-      default:
-        return new DataOutputStream(
-            new BufferedOutputStream(new FileOutputStream(path), bufferSize));
-    }
-  }
-
-  public static DataOutputStream getDataOutputStream(String path, FileType fileType, int bufferSize,
-      long blockSize) throws IOException {
-    path = path.replace("\\", "/");
-    switch (fileType) {
-      case LOCAL:
-        return new DataOutputStream(
-            new BufferedOutputStream(new FileOutputStream(path), bufferSize));
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        Path pt = new Path(path);
-        FileSystem fs = pt.getFileSystem(configuration);
-        FSDataOutputStream stream =
-            fs.create(pt, true, bufferSize, fs.getDefaultReplication(pt), blockSize);
-        return stream;
-      default:
-        return new DataOutputStream(
-            new BufferedOutputStream(new FileOutputStream(path), bufferSize));
-    }
-  }
-
-  /**
-   * This method checks whether the given path exists and, if performFileCheck
-   * is true, whether it is a file.
-   *
-   * @param filePath         - Path
-   * @param fileType         - FileType Local/HDFS
-   * @param performFileCheck - Provide false for folders, true for files
-   */
-  public static boolean isFileExist(String filePath, FileType fileType, boolean performFileCheck)
-      throws IOException {
-    filePath = filePath.replace("\\", "/");
-    switch (fileType) {
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        Path path = new Path(filePath);
-        FileSystem fs = path.getFileSystem(configuration);
-        if (performFileCheck) {
-          return fs.exists(path) && fs.isFile(path);
-        } else {
-          return fs.exists(path);
-        }
-
-      case LOCAL:
-      default:
-        File defaultFile = new File(filePath);
-
-        if (performFileCheck) {
-          return defaultFile.exists() && defaultFile.isFile();
-        } else {
-          return defaultFile.exists();
-        }
-    }
-  }
-
-  /**
-   * This method checks whether the given path exists.
-   *
-   * @param filePath - Path
-   * @param fileType - FileType Local/HDFS
-   */
-  public static boolean isFileExist(String filePath, FileType fileType) throws IOException {
-    filePath = filePath.replace("\\", "/");
-    switch (fileType) {
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        Path path = new Path(filePath);
-        FileSystem fs = path.getFileSystem(configuration);
-        return fs.exists(path);
-
-      case LOCAL:
-      default:
-        File defaultFile = new File(filePath);
-        return defaultFile.exists();
-    }
-  }
-
-  public static boolean createNewFile(String filePath, FileType fileType) throws IOException {
-    filePath = filePath.replace("\\", "/");
-    switch (fileType) {
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        Path path = new Path(filePath);
-        FileSystem fs = path.getFileSystem(configuration);
-        return fs.createNewFile(path);
-
-      case LOCAL:
-      default:
-        File file = new File(filePath);
-        return file.createNewFile();
-    }
-  }
-
-  public static boolean deleteFile(String filePath, FileType fileType) throws IOException {
-    filePath = filePath.replace("\\", "/");
-    switch (fileType) {
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        Path path = new Path(filePath);
-        FileSystem fs = path.getFileSystem(configuration);
-        return fs.delete(path, true);
-
-      case LOCAL:
-      default:
-        File file = new File(filePath);
-        return deleteAllFilesOfDir(file);
-    }
-  }
-
-  public static boolean deleteAllFilesOfDir(File path) {
-    if (!path.exists()) {
-      return true;
-    }
-    if (path.isFile()) {
-      return path.delete();
-    }
-    File[] files = path.listFiles();
-    for (int i = 0; i < files.length; i++) {
-      deleteAllFilesOfDir(files[i]);
-    }
-    return path.delete();
-  }
-
-  public static boolean mkdirs(String filePath, FileType fileType) throws IOException {
-    filePath = filePath.replace("\\", "/");
-    switch (fileType) {
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        Path path = new Path(filePath);
-        FileSystem fs = path.getFileSystem(configuration);
-        return fs.mkdirs(path);
-      case LOCAL:
-      default:
-        File file = new File(filePath);
-        return file.mkdirs();
-    }
-  }
-
-  /**
-   * for getting the DataOutputStream using the HDFS FileSystem append API.
-   *
-   * @param path
-   * @param fileType
-   * @return
-   * @throws IOException
-   */
-  public static DataOutputStream getDataOutputStreamUsingAppend(String path, FileType fileType)
-      throws IOException {
-    path = path.replace("\\", "/");
-    switch (fileType) {
-      case LOCAL:
-        return new DataOutputStream(new BufferedOutputStream(new FileOutputStream(path, true)));
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        Path pt = new Path(path);
-        FileSystem fs = pt.getFileSystem(configuration);
-        FSDataOutputStream stream = fs.append(pt);
-        return stream;
-      default:
-        return new DataOutputStream(new BufferedOutputStream(new FileOutputStream(path)));
-    }
-  }
-
-  /**
-   * for creating a new lock file; if it is created successfully then, in case of
-   * an abrupt shutdown, the stream to that file will be closed.
-   *
-   * @param filePath
-   * @param fileType
-   * @return
-   * @throws IOException
-   */
-  public static boolean createNewLockFile(String filePath, FileType fileType) throws IOException {
-    filePath = filePath.replace("\\", "/");
-    switch (fileType) {
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        Path path = new Path(filePath);
-        FileSystem fs = path.getFileSystem(configuration);
-        if (fs.createNewFile(path)) {
-          fs.deleteOnExit(path);
-          return true;
-        }
-        return false;
-      case LOCAL:
-      default:
-        File file = new File(filePath);
-        return file.createNewFile();
-    }
-  }
-
-  public enum FileType {
-    LOCAL, HDFS, ALLUXIO, VIEWFS
-  }
-
-  /**
-   * Below method will be used to update the file path:
-   * for the local type it removes the file:/ prefix from the path.
-   *
-   * @param filePath
-   * @return updated file path without url for local
-   */
-  public static String getUpdatedFilePath(String filePath) {
-    FileType fileType = getFileType(filePath);
-    switch (fileType) {
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        return filePath;
-      case LOCAL:
-      default:
-        Path pathWithoutSchemeAndAuthority =
-            Path.getPathWithoutSchemeAndAuthority(new Path(filePath));
-        return pathWithoutSchemeAndAuthority.toString();
-    }
-  }
-
-  /**
-   * It computes size of directory
-   *
-   * @param filePath
-   * @return size in bytes
-   * @throws IOException
-   */
-  public static long getDirectorySize(String filePath) throws IOException {
-    FileType fileType = getFileType(filePath);
-    switch (fileType) {
-      case HDFS:
-      case ALLUXIO:
-      case VIEWFS:
-        Path path = new Path(filePath);
-        FileSystem fs = path.getFileSystem(configuration);
-        return fs.getContentSummary(path).getLength();
-      case LOCAL:
-      default:
-        File file = new File(filePath);
-        return FileUtils.sizeOfDirectory(file);
-    }
-  }
-
-}

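For context, the FileFactory deleted here is re-created under org.apache.carbondata.core.datastorage.impl elsewhere in this commit (see the updated imports in the hunks below). A minimal usage sketch of that API, assuming the relocated package and a purely illustrative store path:

    import java.io.DataOutputStream;
    import java.io.IOException;

    import org.apache.carbondata.core.datastorage.impl.FileFactory;
    import org.apache.carbondata.core.datastorage.impl.FileFactory.FileType;

    public class FileFactoryUsageSketch {
      public static void main(String[] args) throws IOException {
        // Hypothetical path; the hdfs://, alluxio:// and viewfs:// prefixes select the file type.
        String path = "hdfs://namenode:9000/carbon/store/part-0.carbondata";
        FileType fileType = FileFactory.getFileType(path);

        if (!FileFactory.isFileExist(path, fileType)) {
          // HDFS/ALLUXIO/VIEWFS writes go through the Hadoop FileSystem;
          // LOCAL writes go through a buffered FileOutputStream.
          DataOutputStream out = FileFactory.getDataOutputStream(path, fileType);
          out.writeInt(42);
          out.close();
        }
      }
    }
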
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/FileHolderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/FileHolderImpl.java b/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/FileHolderImpl.java
deleted file mode 100644
index 9a5c06a..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/FileHolderImpl.java
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.datastorage.store.impl;
-
-import java.io.FileInputStream;
-import java.io.FileNotFoundException;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.channels.FileChannel;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Map.Entry;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
-
-public class FileHolderImpl implements FileHolder {
-  /**
-   * cache to hold filename and its stream
-   */
-  private Map<String, FileChannel> fileNameAndStreamCache;
-
-  /**
-   * FileHolderImpl Constructor
-   * It will create the cache
-   */
-  public FileHolderImpl() {
-    this.fileNameAndStreamCache =
-        new HashMap<String, FileChannel>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
-  }
-
-  public FileHolderImpl(int capacity) {
-    this.fileNameAndStreamCache = new HashMap<String, FileChannel>(capacity);
-  }
-
-  /**
-   * This method will be used to read the byte array from file based on offset
-   * and length (number of bytes) to be read
-   *
-   * @param filePath fully qualified file path
-   * @param offset   reading start position,
-   * @param length   number of bytes to be read
-   * @return read byte array
-   */
-  @Override public byte[] readByteArray(String filePath, long offset, int length)
-      throws IOException {
-    FileChannel fileChannel = updateCache(filePath);
-    ByteBuffer byteBffer = read(fileChannel, length, offset);
-    return byteBffer.array();
-  }
-
-  /**
-   * This method will be used to close all the streams currently present in the cache
-   */
-  @Override public void finish() throws IOException {
-    for (Entry<String, FileChannel> entry : fileNameAndStreamCache.entrySet()) {
-      FileChannel channel = entry.getValue();
-      if (null != channel) {
-        channel.close();
-      }
-    }
-  }
-
-  /**
-   * This method will be used to read an int from the file from position (offset); here
-   * the length will always be 4 because the int byte size is 4
-   *
-   * @param filePath fully qualified file path
-   * @param offset   reading start position,
-   * @return read int
-   */
-  @Override public int readInt(String filePath, long offset) throws IOException {
-    FileChannel fileChannel = updateCache(filePath);
-    ByteBuffer byteBffer = read(fileChannel, CarbonCommonConstants.INT_SIZE_IN_BYTE, offset);
-    return byteBffer.getInt();
-  }
-
-  /**
-   * This method will be used to read an int from the file at the current position; here
-   * the length will always be 4 because the int byte size is 4
-   *
-   * @param filePath fully qualified file path
-   * @return read int
-   */
-  @Override public int readInt(String filePath) throws IOException {
-    FileChannel fileChannel = updateCache(filePath);
-    ByteBuffer byteBffer = read(fileChannel, CarbonCommonConstants.INT_SIZE_IN_BYTE);
-    return byteBffer.getInt();
-  }
-
-  /**
-   * This method will be used to read a long from the file from position (offset); here
-   * the length will always be 8 because the long byte size is 8
-   *
-   * @param filePath fully qualified file path
-   * @param offset   reading start position,
-   * @return read long
-   */
-  @Override public long readDouble(String filePath, long offset) throws IOException {
-    FileChannel fileChannel = updateCache(filePath);
-    ByteBuffer byteBffer = read(fileChannel, CarbonCommonConstants.LONG_SIZE_IN_BYTE, offset);
-    return byteBffer.getLong();
-  }
-
-  /**
-   * This method will be used to check whether a stream is already present in the
-   * cache for the given file path; if not present, then create it and add it to the
-   * cache, otherwise get it from the cache
-   *
-   * @param filePath fully qualified file path
-   * @return channel
-   */
-  private FileChannel updateCache(String filePath) throws FileNotFoundException {
-    FileChannel fileChannel = fileNameAndStreamCache.get(filePath);
-    if (null == fileChannel) {
-      FileInputStream stream = new FileInputStream(filePath);
-      fileChannel = stream.getChannel();
-      fileNameAndStreamCache.put(filePath, fileChannel);
-    }
-    return fileChannel;
-  }
-
-  /**
-   * This method will be used to read from the file based on the number of bytes to be read and the position
-   *
-   * @param channel file channel
-   * @param size    number of bytes
-   * @param offset  position
-   * @return byte buffer
-   */
-  private ByteBuffer read(FileChannel channel, int size, long offset) throws IOException {
-    ByteBuffer byteBffer = ByteBuffer.allocate(size);
-    channel.position(offset);
-    channel.read(byteBffer);
-    byteBffer.rewind();
-    return byteBffer;
-  }
-
-  /**
-   * This method will be used to read from the file based on the number of bytes to be read
-   *
-   * @param channel file channel
-   * @param size    number of bytes
-   * @return byte buffer
-   */
-  private ByteBuffer read(FileChannel channel, int size) throws IOException {
-    ByteBuffer byteBffer = ByteBuffer.allocate(size);
-    channel.read(byteBffer);
-    byteBffer.rewind();
-    return byteBffer;
-  }
-
-
-  /**
-   * This method will be used to read the byte array from file based on length(number of bytes)
-   *
-   * @param filePath fully qualified file path
-   * @param length   number of bytes to be read
-   * @return read byte array
-   */
-  @Override public byte[] readByteArray(String filePath, int length) throws IOException {
-    FileChannel fileChannel = updateCache(filePath);
-    ByteBuffer byteBffer = read(fileChannel, length);
-    return byteBffer.array();
-  }
-
-  /**
-   * This method will be used to read a long from the file from position (offset); here
-   * the length will always be 8 because the long byte size is 8
-   *
-   * @param filePath fully qualified file path
-   * @param offset   reading start position,
-   * @return read long
-   */
-  @Override public long readLong(String filePath, long offset) throws IOException {
-    FileChannel fileChannel = updateCache(filePath);
-    ByteBuffer byteBffer = read(fileChannel, CarbonCommonConstants.LONG_SIZE_IN_BYTE, offset);
-    return byteBffer.getLong();
-  }
-
-}

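Similarly, a short sketch of how the FileHolder cache implemented above is typically driven; the file path and offsets are placeholders, and the relocated FileHolder/FileFactory packages are taken from the import updates later in this commit:

    import java.io.IOException;

    import org.apache.carbondata.core.datastorage.FileHolder;
    import org.apache.carbondata.core.datastorage.impl.FileFactory;

    public class FileHolderUsageSketch {
      public static void main(String[] args) throws IOException {
        String filePath = "/tmp/sample.carbondata";   // placeholder path
        FileHolder holder = FileFactory.getFileHolder(FileFactory.FileType.LOCAL);
        // Each call looks up (or opens and caches) the FileChannel for the path.
        int intValue = holder.readInt(filePath, 0L);
        byte[] bytes = holder.readByteArray(filePath, 4L, 128);
        long longValue = holder.readLong(filePath, 132L);
        holder.finish();                              // closes every cached channel
      }
    }
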
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/data/compressed/AbstractHeavyCompressedDoubleArrayDataStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/data/compressed/AbstractHeavyCompressedDoubleArrayDataStore.java b/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/data/compressed/AbstractHeavyCompressedDoubleArrayDataStore.java
deleted file mode 100644
index ad1d62f..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/data/compressed/AbstractHeavyCompressedDoubleArrayDataStore.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.datastorage.store.impl.data.compressed;
-
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastorage.store.NodeMeasureDataStore;
-import org.apache.carbondata.core.datastorage.store.compression.ValueCompressionHolder;
-import org.apache.carbondata.core.datastorage.store.compression.WriterCompressModel;
-import org.apache.carbondata.core.datastorage.store.dataholder.CarbonWriteDataHolder;
-import org.apache.carbondata.core.util.ValueCompressionUtil;
-
-public abstract class AbstractHeavyCompressedDoubleArrayDataStore
-    implements NodeMeasureDataStore //NodeMeasureDataStore<double[]>
-{
-
-  private LogService LOGGER =
-      LogServiceFactory.getLogService(AbstractHeavyCompressedDoubleArrayDataStore.class.getName());
-
-  /**
-   * values.
-   */
-  protected ValueCompressionHolder[] values;
-
-  /**
-   * compressionModel.
-   */
-  protected WriterCompressModel compressionModel;
-
-  /**
-   * type
-   */
-  private char[] type;
-
-  /**
-   * AbstractHeavyCompressedDoubleArrayDataStore constructor.
-   *
-   * @param compressionModel
-   */
-  public AbstractHeavyCompressedDoubleArrayDataStore(WriterCompressModel compressionModel) {
-    this.compressionModel = compressionModel;
-    if (null != compressionModel) {
-      this.type = compressionModel.getType();
-      values =
-          new ValueCompressionHolder[compressionModel.getValueCompressionHolder().length];
-    }
-  }
-
-  // this method first invokes encoding routine to encode the data chunk,
-  // followed by invoking compression routine for preparing the data chunk for writing.
-  @Override public byte[][] getWritableMeasureDataArray(CarbonWriteDataHolder[] dataHolder) {
-    byte[][] returnValue = new byte[values.length][];
-    for (int i = 0; i < compressionModel.getValueCompressionHolder().length; i++) {
-      values[i] = compressionModel.getValueCompressionHolder()[i];
-      if (type[i] != CarbonCommonConstants.BYTE_VALUE_MEASURE) {
-        // first perform encoding of the data chunk
-        values[i].setValue(
-            ValueCompressionUtil.getValueCompressor(compressionModel.getCompressionFinders()[i])
-                .getCompressedValues(compressionModel.getCompressionFinders()[i], dataHolder[i],
-                    compressionModel.getMaxValue()[i],
-                    compressionModel.getMantissa()[i]));
-      } else {
-        values[i].setValue(dataHolder[i].getWritableByteArrayValues());
-      }
-      values[i].compress();
-      returnValue[i] = values[i].getCompressedData();
-    }
-
-    return returnValue;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/data/compressed/HeavyCompressedDoubleArrayDataInMemoryStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/data/compressed/HeavyCompressedDoubleArrayDataInMemoryStore.java b/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/data/compressed/HeavyCompressedDoubleArrayDataInMemoryStore.java
deleted file mode 100644
index 0c29143..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastorage/store/impl/data/compressed/HeavyCompressedDoubleArrayDataInMemoryStore.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.datastorage.store.impl.data.compressed;
-
-import org.apache.carbondata.core.datastorage.store.compression.WriterCompressModel;
-
-public class HeavyCompressedDoubleArrayDataInMemoryStore
-    extends AbstractHeavyCompressedDoubleArrayDataStore {
-
-  public HeavyCompressedDoubleArrayDataInMemoryStore(WriterCompressModel compressionModel) {
-    super(compressionModel);
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastorage/util/StoreFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastorage/util/StoreFactory.java b/core/src/main/java/org/apache/carbondata/core/datastorage/util/StoreFactory.java
deleted file mode 100644
index 7561ec7..0000000
--- a/core/src/main/java/org/apache/carbondata/core/datastorage/util/StoreFactory.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.datastorage.util;
-
-import org.apache.carbondata.core.datastorage.store.NodeMeasureDataStore;
-import org.apache.carbondata.core.datastorage.store.compression.WriterCompressModel;
-import org.apache.carbondata.core.datastorage.store.impl.data.compressed.HeavyCompressedDoubleArrayDataInMemoryStore;
-
-public final class StoreFactory {
-
-  private StoreFactory() {
-  }
-
-  public static NodeMeasureDataStore createDataStore(WriterCompressModel compressionModel) {
-    return new HeavyCompressedDoubleArrayDataInMemoryStore(compressionModel);
-  }
-
-}

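The compressed measure store classes and the StoreFactory removed above were used together at write time. A sketch of that flow against the pre-move packages shown in this diff, assuming the writer has already built the WriterCompressModel and the CarbonWriteDataHolder array:

    import org.apache.carbondata.core.datastorage.store.NodeMeasureDataStore;
    import org.apache.carbondata.core.datastorage.store.compression.WriterCompressModel;
    import org.apache.carbondata.core.datastorage.store.dataholder.CarbonWriteDataHolder;
    import org.apache.carbondata.core.datastorage.util.StoreFactory;

    public final class MeasureStoreUsageSketch {
      // compressionModel and dataHolders are assumed to be prepared by the data writer.
      public static byte[][] compressMeasures(WriterCompressModel compressionModel,
          CarbonWriteDataHolder[] dataHolders) {
        NodeMeasureDataStore store = StoreFactory.createDataStore(compressionModel);
        // Encodes each measure chunk and then compresses it for writing.
        return store.getWritableMeasureDataArray(dataHolders);
      }
    }
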
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/BlockIndexStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/BlockIndexStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/BlockIndexStore.java
index a80661c..68d0e95 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/BlockIndexStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/BlockIndexStore.java
@@ -46,9 +46,9 @@ import org.apache.carbondata.core.datastore.block.TableBlockUniqueIdentifier;
 import org.apache.carbondata.core.datastore.exception.IndexBuilderException;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.update.CarbonUpdateUtil;
-import org.apache.carbondata.core.update.UpdateVO;
+import org.apache.carbondata.core.update.data.UpdateVO;
 import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.scan.model.QueryModel;
+import org.apache.carbondata.core.scan.model.QueryModel;
 
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/DataRefNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/DataRefNode.java b/core/src/main/java/org/apache/carbondata/core/datastore/DataRefNode.java
index 81d80ef..219ee73 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/DataRefNode.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/DataRefNode.java
@@ -20,10 +20,10 @@ package org.apache.carbondata.core.datastore;
 
 import java.io.IOException;
 
-import org.apache.carbondata.common.iudprocessor.cache.BlockletLevelDeleteDeltaDataCache;
+import org.apache.carbondata.core.update.data.BlockletLevelDeleteDeltaDataCache;
 import org.apache.carbondata.core.datastore.chunk.DimensionColumnDataChunk;
 import org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
+import org.apache.carbondata.core.datastorage.FileHolder;
 
 /**
  * Interface data block reference

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/SegmentTaskIndexStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/SegmentTaskIndexStore.java b/core/src/main/java/org/apache/carbondata/core/datastore/SegmentTaskIndexStore.java
index 03765a4..1ae8d28 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/SegmentTaskIndexStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/SegmentTaskIndexStore.java
@@ -41,7 +41,7 @@ import org.apache.carbondata.core.datastore.exception.IndexBuilderException;
 import org.apache.carbondata.core.metadata.blocklet.DataFileFooter;
 import org.apache.carbondata.core.path.CarbonTablePath;
 import org.apache.carbondata.core.path.CarbonTablePath.DataFileUtil;
-import org.apache.carbondata.core.update.UpdateVO;
+import org.apache.carbondata.core.update.data.UpdateVO;
 import org.apache.carbondata.core.updatestatus.SegmentUpdateStatusManager;
 import org.apache.carbondata.core.util.CarbonUtil;
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentProperties.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentProperties.java b/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentProperties.java
index 2776fd5..e84c8ba 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentProperties.java
@@ -34,7 +34,7 @@ import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
 import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastorage.store.columnar.ColumnGroupModel;
+import org.apache.carbondata.core.datastorage.columnar.ColumnGroupModel;
 import org.apache.carbondata.core.keygenerator.KeyGenerator;
 import org.apache.carbondata.core.keygenerator.columnar.ColumnarSplitter;
 import org.apache.carbondata.core.keygenerator.columnar.impl.MultiDimKeyVarLengthVariableSplitGenerator;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentTaskIndexWrapper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentTaskIndexWrapper.java b/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentTaskIndexWrapper.java
index 383dba2..dc4c938 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentTaskIndexWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/block/SegmentTaskIndexWrapper.java
@@ -25,7 +25,7 @@ import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.carbondata.core.cache.Cacheable;
 import org.apache.carbondata.core.datastore.SegmentTaskIndexStore;
-import org.apache.carbondata.core.update.UpdateVO;
+import org.apache.carbondata.core.update.data.UpdateVO;
 
 /**
  * SegmentTaskIndexWrapper class holds the  taskIdToTableSegmentMap

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/block/TableBlockInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/block/TableBlockInfo.java b/core/src/main/java/org/apache/carbondata/core/datastore/block/TableBlockInfo.java
index 37c1aa0..a5061d2 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/block/TableBlockInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/block/TableBlockInfo.java
@@ -26,7 +26,7 @@ import org.apache.carbondata.core.ColumnarFormatVersion;
 import org.apache.carbondata.core.path.CarbonTablePath;
 import org.apache.carbondata.core.path.CarbonTablePath.DataFileUtil;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
+import org.apache.carbondata.core.datastorage.impl.FileFactory;
 
 /**
 * class will be used to pass the block detail detail will be passed form driver

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/chunk/DimensionColumnDataChunk.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/DimensionColumnDataChunk.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/DimensionColumnDataChunk.java
index 731bf95..086785a 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/DimensionColumnDataChunk.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/DimensionColumnDataChunk.java
@@ -18,8 +18,8 @@
  */
 package org.apache.carbondata.core.datastore.chunk;
 
-import org.apache.carbondata.scan.executor.infos.KeyStructureInfo;
-import org.apache.carbondata.scan.result.vector.ColumnVectorInfo;
+import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
+import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
 
 /**
  * Interface for dimension column chunk.

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/chunk/MeasureColumnDataChunk.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/MeasureColumnDataChunk.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/MeasureColumnDataChunk.java
index 285f66e..d2ee1a7 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/MeasureColumnDataChunk.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/MeasureColumnDataChunk.java
@@ -19,7 +19,7 @@
 package org.apache.carbondata.core.datastore.chunk;
 
 import org.apache.carbondata.core.metadata.blocklet.datachunk.PresenceMeta;
-import org.apache.carbondata.core.datastorage.store.dataholder.CarbonReadDataHolder;
+import org.apache.carbondata.core.datastorage.dataholder.CarbonReadDataHolder;
 
 /**
  * Holder for measure column chunk

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionDataChunk.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionDataChunk.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionDataChunk.java
index 08875c2..bad5679 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionDataChunk.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/ColumnGroupDimensionDataChunk.java
@@ -20,8 +20,8 @@ package org.apache.carbondata.core.datastore.chunk.impl;
 
 import org.apache.carbondata.core.datastore.chunk.store.DimensionChunkStoreFactory;
 import org.apache.carbondata.core.datastore.chunk.store.DimensionChunkStoreFactory.DimensionStoreType;
-import org.apache.carbondata.scan.executor.infos.KeyStructureInfo;
-import org.apache.carbondata.scan.result.vector.ColumnVectorInfo;
+import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
+import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
 
 /**
  * This class is gives access to column group dimension data chunk store

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionDataChunk.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionDataChunk.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionDataChunk.java
index c1f668b..9eee078 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionDataChunk.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/FixedLengthDimensionDataChunk.java
@@ -21,9 +21,9 @@ package org.apache.carbondata.core.datastore.chunk.impl;
 import org.apache.carbondata.core.datastore.chunk.store.DimensionChunkStoreFactory;
 import org.apache.carbondata.core.datastore.chunk.store.DimensionChunkStoreFactory.DimensionStoreType;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.scan.executor.infos.KeyStructureInfo;
-import org.apache.carbondata.scan.result.vector.CarbonColumnVector;
-import org.apache.carbondata.scan.result.vector.ColumnVectorInfo;
+import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
+import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
+import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
 
 /**
  * This class is gives access to fixed length dimension data chunk store

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionDataChunk.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionDataChunk.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionDataChunk.java
index 5084e38..7c16f54 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionDataChunk.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/VariableLengthDimensionDataChunk.java
@@ -23,9 +23,9 @@ import java.util.Arrays;
 import org.apache.carbondata.core.datastore.chunk.store.DimensionChunkStoreFactory;
 import org.apache.carbondata.core.datastore.chunk.store.DimensionChunkStoreFactory.DimensionStoreType;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.scan.executor.infos.KeyStructureInfo;
-import org.apache.carbondata.scan.result.vector.CarbonColumnVector;
-import org.apache.carbondata.scan.result.vector.ColumnVectorInfo;
+import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
+import org.apache.carbondata.core.scan.result.vector.CarbonColumnVector;
+import org.apache.carbondata.core.scan.result.vector.ColumnVectorInfo;
 
 /**
  * This class is gives access to variable length dimension data chunk store

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/DimensionColumnChunkReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/DimensionColumnChunkReader.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/DimensionColumnChunkReader.java
index 0722df4..aeb7ff9 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/DimensionColumnChunkReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/DimensionColumnChunkReader.java
@@ -21,7 +21,7 @@ package org.apache.carbondata.core.datastore.chunk.reader;
 import java.io.IOException;
 
 import org.apache.carbondata.core.datastore.chunk.DimensionColumnDataChunk;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
+import org.apache.carbondata.core.datastorage.FileHolder;
 
 /**
  * Interface for reading the data chunk

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/MeasureColumnChunkReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/MeasureColumnChunkReader.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/MeasureColumnChunkReader.java
index 7d3f891..ec86612 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/MeasureColumnChunkReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/MeasureColumnChunkReader.java
@@ -21,7 +21,7 @@ package org.apache.carbondata.core.datastore.chunk.reader;
 import java.io.IOException;
 
 import org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
+import org.apache.carbondata.core.datastorage.FileHolder;
 
 /**
  * Reader interface for reading the measure blocks from file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/AbstractChunkReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/AbstractChunkReader.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/AbstractChunkReader.java
index 450dc58..4e45bc3 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/AbstractChunkReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/AbstractChunkReader.java
@@ -20,8 +20,8 @@ package org.apache.carbondata.core.datastore.chunk.reader.dimension;
 
 import org.apache.carbondata.core.datastore.chunk.reader.DimensionColumnChunkReader;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastorage.store.compression.Compressor;
-import org.apache.carbondata.core.datastorage.store.compression.CompressorFactory;
+import org.apache.carbondata.core.datastorage.compression.Compressor;
+import org.apache.carbondata.core.datastorage.compression.CompressorFactory;
 import org.apache.carbondata.core.keygenerator.mdkey.NumberCompressor;
 import org.apache.carbondata.core.util.CarbonProperties;
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
index 2db8667..01df68b 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
@@ -29,8 +29,8 @@ import org.apache.carbondata.core.datastore.chunk.reader.dimension.AbstractChunk
 import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
 import org.apache.carbondata.core.metadata.blocklet.datachunk.DataChunk;
 import org.apache.carbondata.core.metadata.Encoding;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
-import org.apache.carbondata.core.datastorage.store.columnar.UnBlockIndexer;
+import org.apache.carbondata.core.datastorage.FileHolder;
+import org.apache.carbondata.core.datastorage.columnar.UnBlockIndexer;
 import org.apache.carbondata.core.util.CarbonUtil;
 
 /**

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
index 07baed3..48b5aeb 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
@@ -27,8 +27,8 @@ import org.apache.carbondata.core.datastore.chunk.impl.FixedLengthDimensionDataC
 import org.apache.carbondata.core.datastore.chunk.impl.VariableLengthDimensionDataChunk;
 import org.apache.carbondata.core.datastore.chunk.reader.dimension.AbstractChunkReader;
 import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
-import org.apache.carbondata.core.datastorage.store.columnar.UnBlockIndexer;
+import org.apache.carbondata.core.datastorage.FileHolder;
+import org.apache.carbondata.core.datastorage.columnar.UnBlockIndexer;
 import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.format.DataChunk2;
 import org.apache.carbondata.format.Encoding;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
index c09b7e9..562ea8c 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
@@ -25,10 +25,10 @@ import org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk;
 import org.apache.carbondata.core.datastore.chunk.reader.measure.AbstractMeasureChunkReader;
 import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
 import org.apache.carbondata.core.metadata.blocklet.datachunk.DataChunk;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
-import org.apache.carbondata.core.datastorage.store.compression.ReaderCompressModel;
-import org.apache.carbondata.core.datastorage.store.compression.ValueCompressionHolder;
-import org.apache.carbondata.core.datastorage.store.dataholder.CarbonReadDataHolder;
+import org.apache.carbondata.core.datastorage.FileHolder;
+import org.apache.carbondata.core.datastorage.compression.ReaderCompressModel;
+import org.apache.carbondata.core.datastorage.compression.ValueCompressionHolder;
+import org.apache.carbondata.core.datastorage.dataholder.CarbonReadDataHolder;
 import org.apache.carbondata.core.metadata.ValueEncoderMeta;
 import org.apache.carbondata.core.util.ValueCompressionUtil;
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
index 9e3db25..90fa6f3 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
@@ -27,11 +27,11 @@ import org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk;
 import org.apache.carbondata.core.datastore.chunk.reader.measure.AbstractMeasureChunkReader;
 import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
 import org.apache.carbondata.core.metadata.blocklet.datachunk.PresenceMeta;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
-import org.apache.carbondata.core.datastorage.store.compression.CompressorFactory;
-import org.apache.carbondata.core.datastorage.store.compression.ValueCompressionHolder;
-import org.apache.carbondata.core.datastorage.store.compression.WriterCompressModel;
-import org.apache.carbondata.core.datastorage.store.dataholder.CarbonReadDataHolder;
+import org.apache.carbondata.core.datastorage.FileHolder;
+import org.apache.carbondata.core.datastorage.compression.CompressorFactory;
+import org.apache.carbondata.core.datastorage.compression.ValueCompressionHolder;
+import org.apache.carbondata.core.datastorage.compression.WriterCompressModel;
+import org.apache.carbondata.core.datastorage.dataholder.CarbonReadDataHolder;
 import org.apache.carbondata.core.metadata.ValueEncoderMeta;
 import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.format.DataChunk2;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
index 6fdf5d0..14df51f 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/AbstractBTreeLeafNode.java
@@ -20,12 +20,12 @@ package org.apache.carbondata.core.datastore.impl.btree;
 
 import java.io.IOException;
 
-import 
org.apache.carbondata.common.iudprocessor.cache.BlockletLevelDeleteDeltaDataCache;
+import 
org.apache.carbondata.core.update.data.BlockletLevelDeleteDeltaDataCache;
 import org.apache.carbondata.core.datastore.DataRefNode;
 import org.apache.carbondata.core.datastore.IndexKey;
 import org.apache.carbondata.core.datastore.chunk.DimensionColumnDataChunk;
 import org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
+import org.apache.carbondata.core.datastorage.FileHolder;
 
 /**
  * Abstract class of a B+Tree leaf node

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BTreeNonLeafNode.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BTreeNonLeafNode.java
 
b/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BTreeNonLeafNode.java
index 9332b93..c2ffae9 100644
--- 
a/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BTreeNonLeafNode.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BTreeNonLeafNode.java
@@ -21,13 +21,13 @@ package org.apache.carbondata.core.datastore.impl.btree;
 import java.util.ArrayList;
 import java.util.List;
 
-import 
org.apache.carbondata.common.iudprocessor.cache.BlockletLevelDeleteDeltaDataCache;
+import 
org.apache.carbondata.core.update.data.BlockletLevelDeleteDeltaDataCache;
 import org.apache.carbondata.core.datastore.DataRefNode;
 import org.apache.carbondata.core.datastore.IndexKey;
 import org.apache.carbondata.core.datastore.chunk.DimensionColumnDataChunk;
 import org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
+import org.apache.carbondata.core.datastorage.FileHolder;
 
 /**
  * Non-leaf node class of a B+Tree which keeps the metadata (start key) of the

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BlockletBTreeLeafNode.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BlockletBTreeLeafNode.java
 
b/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BlockletBTreeLeafNode.java
index e9a7a1a..f3d2bef 100644
--- 
a/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BlockletBTreeLeafNode.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BlockletBTreeLeafNode.java
@@ -27,7 +27,7 @@ import 
org.apache.carbondata.core.datastore.chunk.reader.CarbonDataReaderFactory
 import 
org.apache.carbondata.core.datastore.chunk.reader.DimensionColumnChunkReader;
 import 
org.apache.carbondata.core.datastore.chunk.reader.MeasureColumnChunkReader;
 import org.apache.carbondata.core.metadata.blocklet.index.BlockletMinMaxIndex;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
+import org.apache.carbondata.core.datastorage.FileHolder;
 
 /**
  * Leaf node class of a Blocklet btree

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
 
b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
index 47ca673..9df57b0 100644
--- 
a/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/dictionary/generator/IncrementalColumnDictionaryGenerator.java
@@ -22,7 +22,7 @@ import java.io.IOException;
 import java.util.*;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.carbondata.common.factory.CarbonCommonFactory;
+import org.apache.carbondata.core.service.CarbonCommonFactory;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.cache.Cache;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/load/BlockDetails.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/load/BlockDetails.java 
b/core/src/main/java/org/apache/carbondata/core/load/BlockDetails.java
index 69c7cf5..5f00f23 100644
--- a/core/src/main/java/org/apache/carbondata/core/load/BlockDetails.java
+++ b/core/src/main/java/org/apache/carbondata/core/load/BlockDetails.java
@@ -21,7 +21,7 @@ package org.apache.carbondata.core.load;
 
 import java.io.Serializable;
 
-import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
+import org.apache.carbondata.core.datastorage.impl.FileFactory;
 
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.lib.input.FileSplit;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/metadata/BlockletInfoColumnar.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/metadata/BlockletInfoColumnar.java
 
b/core/src/main/java/org/apache/carbondata/core/metadata/BlockletInfoColumnar.java
index c42bc93..1f3090d 100644
--- 
a/core/src/main/java/org/apache/carbondata/core/metadata/BlockletInfoColumnar.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/metadata/BlockletInfoColumnar.java
@@ -21,7 +21,7 @@ package org.apache.carbondata.core.metadata;
 
 import java.util.BitSet;
 
-import 
org.apache.carbondata.core.datastorage.store.compression.WriterCompressModel;
+import org.apache.carbondata.core.datastorage.compression.WriterCompressModel;
 
 public class BlockletInfoColumnar {
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/partition/Partitioner.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/partition/Partitioner.java 
b/core/src/main/java/org/apache/carbondata/core/partition/Partitioner.java
deleted file mode 100644
index 1907687..0000000
--- a/core/src/main/java/org/apache/carbondata/core/partition/Partitioner.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.partition;
-
-/**
- * Partitions the data as per key
- */
-public interface Partitioner<Key> {
-
-  int getPartition(Key key);
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/partition/impl/HashPartitionerImpl.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/partition/impl/HashPartitionerImpl.java
 
b/core/src/main/java/org/apache/carbondata/core/partition/impl/HashPartitionerImpl.java
deleted file mode 100644
index 18936d6..0000000
--- 
a/core/src/main/java/org/apache/carbondata/core/partition/impl/HashPartitionerImpl.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.core.partition.impl;
-
-import java.util.List;
-
-import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
-import org.apache.carbondata.core.partition.Partitioner;
-
-/**
- * Hash partitioner implementation
- */
-public class HashPartitionerImpl implements Partitioner<Object[]> {
-
-  private int numberOfBuckets;
-
-  private Hash[] hashes;
-
-  public HashPartitionerImpl(List<Integer> indexes, List<ColumnSchema> 
columnSchemas,
-      int numberOfBuckets) {
-    this.numberOfBuckets = numberOfBuckets;
-    hashes = new Hash[indexes.size()];
-    for (int i = 0; i < indexes.size(); i++) {
-      switch(columnSchemas.get(i).getDataType()) {
-        case SHORT:
-        case INT:
-        case LONG:
-          hashes[i] = new IntegralHash(indexes.get(i));
-          break;
-        case DOUBLE:
-        case FLOAT:
-        case DECIMAL:
-          hashes[i] = new DecimalHash(indexes.get(i));
-          break;
-        default:
-          hashes[i] = new StringHash(indexes.get(i));
-      }
-    }
-  }
-
-  @Override public int getPartition(Object[] objects) {
-    int hashCode = 0;
-    for (Hash hash : hashes) {
-      hashCode += hash.getHash(objects);
-    }
-    return (hashCode & Integer.MAX_VALUE) % numberOfBuckets;
-  }
-
-  private interface Hash {
-    int getHash(Object[] value);
-  }
-
-  private static class IntegralHash implements Hash {
-
-    private int index;
-
-    private IntegralHash(int index) {
-      this.index = index;
-    }
-
-    public int getHash(Object[] value) {
-      return value[index] != null ? 
Long.valueOf(value[index].toString()).hashCode() : 0;
-    }
-  }
-
-  private static class DecimalHash implements Hash {
-
-    private int index;
-
-    private DecimalHash(int index) {
-      this.index = index;
-    }
-
-    public int getHash(Object[] value) {
-      return value[index] != null ? 
Double.valueOf(value[index].toString()).hashCode() : 0;
-    }
-  }
-
-  private static class StringHash implements Hash {
-
-    private int index;
-
-    private StringHash(int index) {
-      this.index = index;
-    }
-
-    @Override public int getHash(Object[] value) {
-      return value[index] != null ? value[index].hashCode() : 0;
-    }
-  }
-}
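
For illustration only, and not part of this patch: the partitioner removed above (together with the Partitioner<Object[]> interface deleted just before it) assigned a row to a bucket by summing one hash per bucketing column and folding the sum into the bucket range. The standalone snippet below mirrors that arithmetic; plain hashCode() calls stand in for the removed IntegralHash/DecimalHash/StringHash helpers, which normalized numeric columns by parsing their toString() value first, and the class name and sample row are assumptions made here.

public class BucketAssignmentExample {
  public static void main(String[] args) {
    Object[] row = {42L, "abc", 3.14d};   // one row, three bucketing columns
    int numberOfBuckets = 8;

    // sum the per-column hash codes, as HashPartitionerImpl.getPartition did
    int hashCode = 0;
    for (Object column : row) {
      hashCode += column != null ? column.hashCode() : 0;
    }

    // mask the sign bit so the modulo always lands in [0, numberOfBuckets)
    int bucket = (hashCode & Integer.MAX_VALUE) % numberOfBuckets;
    System.out.println("row -> bucket " + bucket);
  }
}

Masking with Integer.MAX_VALUE avoids a negative result when the summed hash overflows, which a plain Math.abs could not guarantee for Integer.MIN_VALUE.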

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/path/CarbonTablePath.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/path/CarbonTablePath.java 
b/core/src/main/java/org/apache/carbondata/core/path/CarbonTablePath.java
index 337e2bb..b762ca1 100644
--- a/core/src/main/java/org/apache/carbondata/core/path/CarbonTablePath.java
+++ b/core/src/main/java/org/apache/carbondata/core/path/CarbonTablePath.java
@@ -22,9 +22,9 @@ import java.io.File;
 
 import org.apache.carbondata.core.CarbonTableIdentifier;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastorage.store.filesystem.CarbonFile;
-import 
org.apache.carbondata.core.datastorage.store.filesystem.CarbonFileFilter;
-import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
+import org.apache.carbondata.core.datastorage.filesystem.CarbonFile;
+import org.apache.carbondata.core.datastorage.filesystem.CarbonFileFilter;
+import org.apache.carbondata.core.datastorage.impl.FileFactory;
 import static 
org.apache.carbondata.core.constants.CarbonCommonConstants.INVALID_SEGMENT_ID;
 
 import org.apache.hadoop.fs.Path;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteDeltaFileReaderImpl.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteDeltaFileReaderImpl.java
 
b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteDeltaFileReaderImpl.java
index dac2d43..b76353c 100644
--- 
a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteDeltaFileReaderImpl.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteDeltaFileReaderImpl.java
@@ -24,7 +24,7 @@ import java.io.*;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
+import org.apache.carbondata.core.datastorage.impl.FileFactory;
 import org.apache.carbondata.core.update.DeleteDeltaBlockDetails;
 import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.fileoperations.AtomicFileOperations;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteFilesDataReader.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteFilesDataReader.java
 
b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteFilesDataReader.java
index a0922ec..04f6dfd 100644
--- 
a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteFilesDataReader.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDeleteFilesDataReader.java
@@ -29,7 +29,7 @@ import java.util.concurrent.*;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
+import org.apache.carbondata.core.datastorage.impl.FileFactory;
 import org.apache.carbondata.core.update.DeleteDeltaBlockDetails;
 import org.apache.carbondata.core.update.DeleteDeltaBlockletDetails;
 import org.apache.carbondata.core.util.CarbonProperties;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
 
b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
index 9bf1919..43ea57f 100644
--- 
a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryMetadataReaderImpl.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.carbondata.common.factory.CarbonCommonFactory;
+import org.apache.carbondata.core.service.CarbonCommonFactory;
 import org.apache.carbondata.core.CarbonTableIdentifier;
 import org.apache.carbondata.core.ColumnIdentifier;
 import org.apache.carbondata.core.path.CarbonTablePath;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryReaderImpl.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryReaderImpl.java
 
b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryReaderImpl.java
index ea87ed1..27b1fc3 100644
--- 
a/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryReaderImpl.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/reader/CarbonDictionaryReaderImpl.java
@@ -25,7 +25,7 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.carbondata.common.factory.CarbonCommonFactory;
+import org.apache.carbondata.core.service.CarbonCommonFactory;
 import 
org.apache.carbondata.core.cache.dictionary.ColumnDictionaryChunkIterator;
 import org.apache.carbondata.core.CarbonTableIdentifier;
 import org.apache.carbondata.core.ColumnIdentifier;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/reader/ThriftReader.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/reader/ThriftReader.java 
b/core/src/main/java/org/apache/carbondata/core/reader/ThriftReader.java
index 81b7661..8a7abd6 100644
--- a/core/src/main/java/org/apache/carbondata/core/reader/ThriftReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/reader/ThriftReader.java
@@ -22,7 +22,7 @@ package org.apache.carbondata.core.reader;
 import java.io.DataInputStream;
 import java.io.IOException;
 
-import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
+import org.apache.carbondata.core.datastorage.impl.FileFactory;
 import org.apache.carbondata.core.util.CarbonUtil;
 
 import org.apache.thrift.TBase;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java
 
b/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java
index 99adbb4..1dd51d4 100644
--- 
a/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java
+++ 
b/core/src/main/java/org/apache/carbondata/core/reader/sortindex/CarbonDictionarySortIndexReaderImpl.java
@@ -21,13 +21,13 @@ package org.apache.carbondata.core.reader.sortindex;
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.carbondata.common.factory.CarbonCommonFactory;
+import org.apache.carbondata.core.service.CarbonCommonFactory;
 import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.CarbonTableIdentifier;
 import org.apache.carbondata.core.ColumnIdentifier;
 import org.apache.carbondata.core.path.CarbonTablePath;
-import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
+import org.apache.carbondata.core.datastorage.impl.FileFactory;
 import org.apache.carbondata.core.reader.CarbonDictionaryColumnMetaChunk;
 import org.apache.carbondata.core.reader.CarbonDictionaryMetadataReader;
 import org.apache.carbondata.core.reader.CarbonDictionaryMetadataReaderImpl;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/scan/collector/ScannedResultCollector.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/scan/collector/ScannedResultCollector.java
 
b/core/src/main/java/org/apache/carbondata/core/scan/collector/ScannedResultCollector.java
new file mode 100644
index 0000000..7ea0625
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/scan/collector/ScannedResultCollector.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.core.scan.collector;
+
+import java.util.List;
+
+import org.apache.carbondata.core.scan.result.AbstractScannedResult;
+import org.apache.carbondata.core.scan.result.vector.CarbonColumnarBatch;
+
+/**
+ * Interface which will be used to aggregate the scan result
+ */
+public interface ScannedResultCollector {
+
+  /**
+   * Below method will be used to aggregate the scanned result
+   *
+   * @param scannedResult scanned result
+   * @return the aggregated rows
+   */
+  List<Object[]> collectData(AbstractScannedResult scannedResult, int 
batchSize);
+
+  /**
+   * Collects data in columnar format.
+   * @param scannedResult
+   * @param columnarBatch
+   */
+  void collectVectorBatch(AbstractScannedResult scannedResult, 
CarbonColumnarBatch columnarBatch);
+}
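
For illustration only, not part of this patch: because the contract above is just these two methods, cross-cutting behaviour can be layered on any implementation by delegation. The hypothetical wrapper below counts the rows produced on the list-based path; the class name and the counting concern are assumptions made here, only the interface itself comes from the file added above.

package org.apache.carbondata.core.scan.collector;

import java.util.List;

import org.apache.carbondata.core.scan.result.AbstractScannedResult;
import org.apache.carbondata.core.scan.result.vector.CarbonColumnarBatch;

/** Hypothetical decorator that counts how many rows the wrapped collector returns. */
public class CountingResultCollector implements ScannedResultCollector {

  private final ScannedResultCollector delegate;
  private long rowCount;

  public CountingResultCollector(ScannedResultCollector delegate) {
    this.delegate = delegate;
  }

  @Override public List<Object[]> collectData(AbstractScannedResult scannedResult, int batchSize) {
    List<Object[]> rows = delegate.collectData(scannedResult, batchSize);
    rowCount += rows.size();
    return rows;
  }

  @Override public void collectVectorBatch(AbstractScannedResult scannedResult,
      CarbonColumnarBatch columnarBatch) {
    // row counting for the vector path is omitted; the batch API is not shown in this patch
    delegate.collectVectorBatch(scannedResult, columnarBatch);
  }

  public long getRowCount() {
    return rowCount;
  }
}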

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/952cf517/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/AbstractScannedResultCollector.java
----------------------------------------------------------------------
diff --git 
a/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/AbstractScannedResultCollector.java
 
b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/AbstractScannedResultCollector.java
new file mode 100644
index 0000000..3f7dd4a
--- /dev/null
+++ 
b/core/src/main/java/org/apache/carbondata/core/scan/collector/impl/AbstractScannedResultCollector.java
@@ -0,0 +1,159 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.core.scan.collector.impl;
+
+import java.util.List;
+
+import org.apache.carbondata.common.logging.LogService;
+import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.core.datastore.chunk.MeasureColumnDataChunk;
+import org.apache.carbondata.core.metadata.DataType;
+import org.apache.carbondata.core.keygenerator.KeyGenException;
+import org.apache.carbondata.core.scan.collector.ScannedResultCollector;
+import org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo;
+import org.apache.carbondata.core.scan.executor.infos.KeyStructureInfo;
+import org.apache.carbondata.core.scan.executor.util.QueryUtil;
+import org.apache.carbondata.core.scan.result.AbstractScannedResult;
+import org.apache.carbondata.core.scan.result.vector.CarbonColumnarBatch;
+import org.apache.carbondata.core.scan.wrappers.ByteArrayWrapper;
+
+/**
+ * It is not a collector itself; it is just a scanned result holder.
+ */
+public abstract class AbstractScannedResultCollector implements 
ScannedResultCollector {
+
+  private static final LogService LOGGER =
+      
LogServiceFactory.getLogService(AbstractScannedResultCollector.class.getName());
+
+  /**
+   * restructuring info
+   */
+  private KeyStructureInfo restructureInfos;
+
+  /**
+   * table block execution infos
+   */
+  protected BlockExecutionInfo tableBlockExecutionInfos;
+
+  /**
+   * Measure ordinals
+   */
+  protected int[] measuresOrdinal;
+
+  /**
+   * flags indicating whether each measure exists in the current table block;
+   * used to handle the restructuring scenario
+   */
+  protected boolean[] isMeasureExistsInCurrentBlock;
+
+  /**
+   * default values of the measures; in a restructuring scenario some measures won't
+   * be present in the table block, so their default values are used while
+   * aggregating data for those measure columns
+   */
+  private Object[] measureDefaultValue;
+
+  /**
+   * measure datatypes.
+   */
+  protected DataType[] measureDatatypes;
+
+  public AbstractScannedResultCollector(BlockExecutionInfo 
blockExecutionInfos) {
+    this.tableBlockExecutionInfos = blockExecutionInfos;
+    restructureInfos = blockExecutionInfos.getKeyStructureInfo();
+    measuresOrdinal = 
tableBlockExecutionInfos.getAggregatorInfo().getMeasureOrdinals();
+    isMeasureExistsInCurrentBlock = 
tableBlockExecutionInfos.getAggregatorInfo().getMeasureExists();
+    measureDefaultValue = 
tableBlockExecutionInfos.getAggregatorInfo().getDefaultValues();
+    this.measureDatatypes = 
tableBlockExecutionInfos.getAggregatorInfo().getMeasureDataTypes();
+  }
+
+  protected void fillMeasureData(Object[] msrValues, int offset,
+      AbstractScannedResult scannedResult) {
+    for (short i = 0; i < measuresOrdinal.length; i++) {
+      // if the measure exists in this block then pass its measure column
+      // data chunk to the collector
+      if (isMeasureExistsInCurrentBlock[i]) {
+        msrValues[i + offset] = 
getMeasureData(scannedResult.getMeasureChunk(measuresOrdinal[i]),
+            scannedResult.getCurrenrRowId(), measureDatatypes[i]);
+      } else {
+        // if not then get the default value and use that value in aggregation
+        msrValues[i + offset] = measureDefaultValue[i];
+      }
+    }
+  }
+
+  private Object getMeasureData(MeasureColumnDataChunk dataChunk, int index, 
DataType dataType) {
+    if (!dataChunk.getNullValueIndexHolder().getBitSet().get(index)) {
+      switch (dataType) {
+        case SHORT:
+        case INT:
+        case LONG:
+          return 
dataChunk.getMeasureDataHolder().getReadableLongValueByIndex(index);
+        case DECIMAL:
+          return org.apache.spark.sql.types.Decimal
+              
.apply(dataChunk.getMeasureDataHolder().getReadableBigDecimalValueByIndex(index));
+        default:
+          return 
dataChunk.getMeasureDataHolder().getReadableDoubleValueByIndex(index);
+      }
+    }
+    return null;
+  }
+
+  /**
+   * Below method will be used to get the result
+   */
+  protected void updateData(List<Object[]> listBasedResult) {
+    if (tableBlockExecutionInfos.isFixedKeyUpdateRequired()) {
+      updateKeyWithLatestBlockKeygenerator(listBasedResult);
+    }
+  }
+
+  /**
+   * Below method will be used to update the fixed length key with the
+   * latest block key generator
+   *
+   * @return updated block
+   */
+  private void updateKeyWithLatestBlockKeygenerator(List<Object[]> 
listBasedResult) {
+    try {
+      long[] data = null;
+      ByteArrayWrapper key = null;
+      for (int i = 0; i < listBasedResult.size(); i++) {
+        // get the key
+        key = (ByteArrayWrapper) listBasedResult.get(i)[0];
+        // unpack the key with table block key generator
+        data = tableBlockExecutionInfos.getBlockKeyGenerator()
+            .getKeyArray(key.getDictionaryKey(), 
tableBlockExecutionInfos.getMaskedByteForBlock());
+        // pack the key with the latest block key generator
+        // and generate the masked key for that key
+        key.setDictionaryKey(QueryUtil
+            .getMaskedKey(restructureInfos.getKeyGenerator().generateKey(data),
+                restructureInfos.getMaxKey(), 
restructureInfos.getMaskByteRanges(),
+                restructureInfos.getMaskByteRanges().length));
+      }
+    } catch (KeyGenException e) {
+      LOGGER.error(e);
+    }
+  }
+
+  @Override public void collectVectorBatch(AbstractScannedResult scannedResult,
+      CarbonColumnarBatch columnarBatch) {
+    throw new UnsupportedOperationException("Works only for batch collectors");
+  }
+}
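
For illustration only, not part of this patch: a concrete collector built on this abstract class only has to supply collectData, since collectVectorBatch already has a default (throwing) implementation above and measure handling can be delegated to the protected fillMeasureData. The sketch below is a hypothetical subclass that emits only measure values; how the scanned result's row cursor is advanced between rows is not visible in this hunk, so that step is deliberately left as a comment.

package org.apache.carbondata.core.scan.collector.impl;

import java.util.ArrayList;
import java.util.List;

import org.apache.carbondata.core.scan.executor.infos.BlockExecutionInfo;
import org.apache.carbondata.core.scan.result.AbstractScannedResult;

/** Hypothetical collector returning one Object[] of measure values per collected row. */
public class MeasureOnlyResultCollector extends AbstractScannedResultCollector {

  public MeasureOnlyResultCollector(BlockExecutionInfo blockExecutionInfos) {
    super(blockExecutionInfos);
  }

  @Override public List<Object[]> collectData(AbstractScannedResult scannedResult, int batchSize) {
    List<Object[]> rows = new ArrayList<>(batchSize);
    for (int i = 0; i < batchSize; i++) {
      Object[] row = new Object[measuresOrdinal.length];
      // fillMeasureData reads the current row of every projected measure chunk,
      // falling back to the restructure default value when the measure is missing
      // from this block
      fillMeasureData(row, 0, scannedResult);
      rows.add(row);
      // advancing scannedResult to its next row is elided here; that part of the
      // AbstractScannedResult API is not shown in this patch
    }
    return rows;
  }
}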
