Repository: carbondata Updated Branches: refs/heads/master bfd77f69f -> c125f0caa
[CARBONDATA-2204] Optimized number of reads of tablestatus file while querying This PR avoids reading the status file multiple times. For the first query, it reads 2 times (needed for the datamap refresher) and 1 time from the second query onwards. This closes #1999 Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/c125f0ca Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/c125f0ca Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/c125f0ca Branch: refs/heads/master Commit: c125f0caa58f1e7cfa7d10b52ab53364f1895c35 Parents: bfd77f6 Author: ravipesala <ravi.pes...@gmail.com> Authored: Mon Feb 26 18:36:03 2018 +0530 Committer: Jacky Li <jacky.li...@qq.com> Committed: Sat Mar 3 13:50:16 2018 +0800 ---------------------------------------------------------------------- .../core/constants/CarbonCommonConstants.java | 4 - .../statusmanager/SegmentStatusManager.java | 122 +++++++++---------- .../SegmentUpdateStatusManager.java | 28 +++-- .../core/util/path/CarbonTablePath.java | 9 +- .../hadoop/api/CarbonTableInputFormat.java | 24 ++-- .../hadoop/test/util/StoreCreator.java | 8 +- .../presto/util/CarbonDataStoreCreator.scala | 2 +- .../carbondata/processing/StoreCreator.java | 4 +- 8 files changed, 108 insertions(+), 93 deletions(-) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/carbondata/blob/c125f0ca/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java ---------------------------------------------------------------------- diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java index fa2b7d8..b2a3375 100644 --- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java +++ 
b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java @@ -650,10 +650,6 @@ public final class CarbonCommonConstants { */ public static final int DEFAULT_MAX_QUERY_EXECUTION_TIME = 60; /** - * LOADMETADATA_FILENAME - */ - public static final String LOADMETADATA_FILENAME = "tablestatus"; - /** * TABLE UPDATE STATUS FILENAME */ public static final String TABLEUPDATESTATUS_FILENAME = "tableupdatestatus"; http://git-wip-us.apache.org/repos/asf/carbondata/blob/c125f0ca/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java ---------------------------------------------------------------------- diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java index 76c2dc7..ab849ce 100755 --- a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java +++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java @@ -98,6 +98,15 @@ public class SegmentStatusManager { * @throws IOException */ public ValidAndInvalidSegmentsInfo getValidAndInvalidSegments() throws IOException { + return getValidAndInvalidSegments(null); + } + + /** + * get valid segment for given load status details. 
+ * + */ + public ValidAndInvalidSegmentsInfo getValidAndInvalidSegments( + LoadMetadataDetails[] loadMetadataDetails) throws IOException { // @TODO: move reading LoadStatus file to separate class List<Segment> listOfValidSegments = new ArrayList<>(10); @@ -108,73 +117,56 @@ public class SegmentStatusManager { CarbonTablePath carbonTablePath = CarbonStorePath .getCarbonTablePath(absoluteTableIdentifier.getTablePath(), absoluteTableIdentifier.getCarbonTableIdentifier()); - String dataPath = carbonTablePath.getTableStatusFilePath(); - DataInputStream dataInputStream = null; - // Use GSON to deserialize the load information - Gson gson = new Gson(); - - AtomicFileOperations fileOperation = - new AtomicFileOperationsImpl(dataPath, FileFactory.getFileType(dataPath)); - LoadMetadataDetails[] loadFolderDetailsArray; try { - if (FileFactory.isFileExist(dataPath, FileFactory.getFileType(dataPath))) { - dataInputStream = fileOperation.openForRead(); - BufferedReader buffReader = - new BufferedReader(new InputStreamReader(dataInputStream, "UTF-8")); - loadFolderDetailsArray = gson.fromJson(buffReader, LoadMetadataDetails[].class); - // if loadFolderDetailsArray is null, assign a empty array - if (null == loadFolderDetailsArray) { - loadFolderDetailsArray = new LoadMetadataDetails[0]; - } - //just directly iterate Array - for (LoadMetadataDetails segment : loadFolderDetailsArray) { - if (SegmentStatus.SUCCESS == segment.getSegmentStatus() - || SegmentStatus.MARKED_FOR_UPDATE == segment.getSegmentStatus() - || SegmentStatus.LOAD_PARTIAL_SUCCESS == segment.getSegmentStatus() - || SegmentStatus.STREAMING == segment.getSegmentStatus() - || SegmentStatus.STREAMING_FINISH == segment.getSegmentStatus()) { - // check for merged loads. 
- if (null != segment.getMergedLoadName()) { - Segment seg = new Segment(segment.getMergedLoadName(), segment.getSegmentFile()); - if (!listOfValidSegments.contains(seg)) { - listOfValidSegments.add(seg); - } - // if merged load is updated then put it in updated list - if (SegmentStatus.MARKED_FOR_UPDATE == segment.getSegmentStatus()) { - listOfValidUpdatedSegments.add(seg); - } - continue; + if (loadMetadataDetails == null) { + loadMetadataDetails = readTableStatusFile(carbonTablePath.getTableStatusFilePath()); + } + //just directly iterate Array + for (LoadMetadataDetails segment : loadMetadataDetails) { + if (SegmentStatus.SUCCESS == segment.getSegmentStatus() + || SegmentStatus.MARKED_FOR_UPDATE == segment.getSegmentStatus() + || SegmentStatus.LOAD_PARTIAL_SUCCESS == segment.getSegmentStatus() + || SegmentStatus.STREAMING == segment.getSegmentStatus() + || SegmentStatus.STREAMING_FINISH == segment.getSegmentStatus()) { + // check for merged loads. + if (null != segment.getMergedLoadName()) { + Segment seg = new Segment(segment.getMergedLoadName(), segment.getSegmentFile()); + if (!listOfValidSegments.contains(seg)) { + listOfValidSegments.add(seg); } - + // if merged load is updated then put it in updated list if (SegmentStatus.MARKED_FOR_UPDATE == segment.getSegmentStatus()) { - - listOfValidUpdatedSegments - .add(new Segment(segment.getLoadName(), segment.getSegmentFile())); - } - if (SegmentStatus.STREAMING == segment.getSegmentStatus() - || SegmentStatus.STREAMING_FINISH == segment.getSegmentStatus()) { - listOfStreamSegments - .add(new Segment(segment.getLoadName(), segment.getSegmentFile())); - continue; + listOfValidUpdatedSegments.add(seg); } - listOfValidSegments.add(new Segment(segment.getLoadName(), segment.getSegmentFile())); - } else if ((SegmentStatus.LOAD_FAILURE == segment.getSegmentStatus() - || SegmentStatus.COMPACTED == segment.getSegmentStatus() - || SegmentStatus.MARKED_FOR_DELETE == segment.getSegmentStatus())) { - 
listOfInvalidSegments.add(new Segment(segment.getLoadName(), segment.getSegmentFile())); - } else if (SegmentStatus.INSERT_IN_PROGRESS == segment.getSegmentStatus() || - SegmentStatus.INSERT_OVERWRITE_IN_PROGRESS == segment.getSegmentStatus()) { - listOfInProgressSegments + continue; + } + + if (SegmentStatus.MARKED_FOR_UPDATE == segment.getSegmentStatus()) { + + listOfValidUpdatedSegments .add(new Segment(segment.getLoadName(), segment.getSegmentFile())); } + if (SegmentStatus.STREAMING == segment.getSegmentStatus() + || SegmentStatus.STREAMING_FINISH == segment.getSegmentStatus()) { + listOfStreamSegments + .add(new Segment(segment.getLoadName(), segment.getSegmentFile())); + continue; + } + listOfValidSegments.add(new Segment(segment.getLoadName(), segment.getSegmentFile())); + } else if ((SegmentStatus.LOAD_FAILURE == segment.getSegmentStatus() + || SegmentStatus.COMPACTED == segment.getSegmentStatus() + || SegmentStatus.MARKED_FOR_DELETE == segment.getSegmentStatus())) { + listOfInvalidSegments.add(new Segment(segment.getLoadName(), segment.getSegmentFile())); + } else if (SegmentStatus.INSERT_IN_PROGRESS == segment.getSegmentStatus() || + SegmentStatus.INSERT_OVERWRITE_IN_PROGRESS == segment.getSegmentStatus()) { + listOfInProgressSegments + .add(new Segment(segment.getLoadName(), segment.getSegmentFile())); } } } catch (IOException e) { LOG.error(e); throw e; - } finally { - CarbonUtil.closeStreams(dataInputStream); } return new ValidAndInvalidSegmentsInfo(listOfValidSegments, listOfValidUpdatedSegments, listOfInvalidSegments, listOfStreamSegments, listOfInProgressSegments); @@ -188,26 +180,32 @@ public class SegmentStatusManager { */ public static LoadMetadataDetails[] readLoadMetadata(String metadataFolderPath) { String metadataFileName = metadataFolderPath + CarbonCommonConstants.FILE_SEPARATOR - + CarbonCommonConstants.LOADMETADATA_FILENAME; - return readTableStatusFile(metadataFileName); + + CarbonTablePath.TABLE_STATUS_FILE; + try { + return 
readTableStatusFile(metadataFileName); + } catch (IOException e) { + return new LoadMetadataDetails[0]; + } } /** * Reads the table status file with the specified UUID if non empty. */ - public static LoadMetadataDetails[] readLoadMetadata(String metaDataFolderPath, String uuid) { + public static LoadMetadataDetails[] readLoadMetadata(String metaDataFolderPath, String uuid) + throws IOException { String tableStatusFileName; if (uuid.isEmpty()) { tableStatusFileName = metaDataFolderPath + CarbonCommonConstants.FILE_SEPARATOR - + CarbonCommonConstants.LOADMETADATA_FILENAME; + + CarbonTablePath.TABLE_STATUS_FILE; } else { tableStatusFileName = metaDataFolderPath + CarbonCommonConstants.FILE_SEPARATOR - + CarbonCommonConstants.LOADMETADATA_FILENAME + CarbonCommonConstants.UNDERSCORE + uuid; + + CarbonTablePath.TABLE_STATUS_FILE + CarbonCommonConstants.UNDERSCORE + uuid; } return readTableStatusFile(tableStatusFileName); } - public static LoadMetadataDetails[] readTableStatusFile(String tableStatusPath) { + public static LoadMetadataDetails[] readTableStatusFile(String tableStatusPath) + throws IOException { Gson gsonObjectToRead = new Gson(); DataInputStream dataInputStream = null; BufferedReader buffReader = null; @@ -228,7 +226,7 @@ public class SegmentStatusManager { gsonObjectToRead.fromJson(buffReader, LoadMetadataDetails[].class); } catch (IOException e) { LOG.error(e, "Failed to read metadata of load"); - return new LoadMetadataDetails[0]; + throw e; } finally { closeStreams(buffReader, inStream, dataInputStream); } http://git-wip-us.apache.org/repos/asf/carbondata/blob/c125f0ca/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java ---------------------------------------------------------------------- diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java index 71b6ba8..3fc2813 
100644 --- a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java +++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentUpdateStatusManager.java @@ -73,6 +73,21 @@ public class SegmentUpdateStatusManager { private Map<String, SegmentUpdateDetails> blockAndDetailsMap; private boolean isPartitionTable; + public SegmentUpdateStatusManager(AbsoluteTableIdentifier absoluteTableIdentifier, + LoadMetadataDetails[] segmentDetails) { + this.absoluteTableIdentifier = absoluteTableIdentifier; + carbonTablePath = CarbonStorePath.getCarbonTablePath(absoluteTableIdentifier.getTablePath(), + absoluteTableIdentifier.getCarbonTableIdentifier()); + // current it is used only for read function scenarios, as file update always requires to work + // on latest file status. + this.segmentDetails = segmentDetails; + if (segmentDetails.length > 0) { + isPartitionTable = segmentDetails[0].getSegmentFile() != null; + } + updateDetails = readLoadMetadata(); + populateMap(); + } + /** * @param absoluteTableIdentifier */ @@ -80,11 +95,10 @@ public class SegmentUpdateStatusManager { this.absoluteTableIdentifier = absoluteTableIdentifier; carbonTablePath = CarbonStorePath.getCarbonTablePath(absoluteTableIdentifier.getTablePath(), absoluteTableIdentifier.getCarbonTableIdentifier()); - SegmentStatusManager segmentStatusManager = new SegmentStatusManager(absoluteTableIdentifier); // current it is used only for read function scenarios, as file update always requires to work // on latest file status. 
segmentDetails = - segmentStatusManager.readLoadMetadata(carbonTablePath.getMetadataDirectoryPath()); + SegmentStatusManager.readLoadMetadata(carbonTablePath.getMetadataDirectoryPath()); if (segmentDetails.length > 0) { isPartitionTable = segmentDetails[0].getSegmentFile() != null; } @@ -732,16 +746,10 @@ public class SegmentUpdateStatusManager { * @return updateStatusFileName */ private String getUpdatedStatusIdentifier() { - SegmentStatusManager ssm = new SegmentStatusManager(absoluteTableIdentifier); - CarbonTablePath carbonTablePath = CarbonStorePath - .getCarbonTablePath(absoluteTableIdentifier.getTablePath(), - absoluteTableIdentifier.getCarbonTableIdentifier()); - LoadMetadataDetails[] loadDetails = - ssm.readLoadMetadata(carbonTablePath.getMetadataDirectoryPath()); - if (loadDetails.length == 0) { + if (segmentDetails.length == 0) { return null; } - return loadDetails[0].getUpdateStatusFileName(); + return segmentDetails[0].getUpdateStatusFileName(); } /** http://git-wip-us.apache.org/repos/asf/carbondata/blob/c125f0ca/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java ---------------------------------------------------------------------- diff --git a/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java b/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java index b5fe5ea..cb264c4 100644 --- a/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java +++ b/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java @@ -38,7 +38,7 @@ public class CarbonTablePath extends Path { private static final String DICTIONARY_META_EXT = ".dictmeta"; private static final String SORT_INDEX_EXT = ".sortindex"; private static final String SCHEMA_FILE = "schema"; - private static final String TABLE_STATUS_FILE = "tablestatus"; + public static final String TABLE_STATUS_FILE = "tablestatus"; private static final String FACT_DIR = "Fact"; private static final String 
SEGMENT_PREFIX = "Segment_"; private static final String PARTITION_PREFIX = "Part"; @@ -177,6 +177,13 @@ public class CarbonTablePath extends Path { } /** + * Return table status file path based on `tablePath` + */ + public static String getTableStatusFilePath(String tablePath) { + return getMetadataPath(tablePath) + CarbonCommonConstants.FILE_SEPARATOR + TABLE_STATUS_FILE; + } + + /** * @param columnId unique column identifier * @return absolute path of dictionary meta file */ http://git-wip-us.apache.org/repos/asf/carbondata/blob/c125f0ca/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableInputFormat.java ---------------------------------------------------------------------- diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableInputFormat.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableInputFormat.java index 3dbf04f..f6624cd 100644 --- a/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableInputFormat.java +++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/api/CarbonTableInputFormat.java @@ -64,6 +64,7 @@ import org.apache.carbondata.core.stats.QueryStatistic; import org.apache.carbondata.core.stats.QueryStatisticsConstants; import org.apache.carbondata.core.stats.QueryStatisticsRecorder; import org.apache.carbondata.core.statusmanager.FileFormat; +import org.apache.carbondata.core.statusmanager.LoadMetadataDetails; import org.apache.carbondata.core.statusmanager.SegmentStatusManager; import org.apache.carbondata.core.statusmanager.SegmentUpdateStatusManager; import org.apache.carbondata.core.util.CarbonProperties; @@ -341,7 +342,10 @@ public class CarbonTableInputFormat<T> extends FileInputFormat<Void, T> { */ @Override public List<InputSplit> getSplits(JobContext job) throws IOException { AbsoluteTableIdentifier identifier = getAbsoluteTableIdentifier(job.getConfiguration()); - SegmentUpdateStatusManager updateStatusManager = new SegmentUpdateStatusManager(identifier); + 
LoadMetadataDetails[] loadMetadataDetails = SegmentStatusManager + .readTableStatusFile(CarbonTablePath.getTableStatusFilePath(identifier.getTablePath())); + SegmentUpdateStatusManager updateStatusManager = + new SegmentUpdateStatusManager(identifier, loadMetadataDetails); CarbonTable carbonTable = getOrCreateCarbonTable(job.getConfiguration()); if (null == carbonTable) { throw new IOException("Missing/Corrupt schema file for table."); @@ -355,7 +359,7 @@ public class CarbonTableInputFormat<T> extends FileInputFormat<Void, T> { // get all valid segments and set them into the configuration SegmentStatusManager segmentStatusManager = new SegmentStatusManager(identifier); SegmentStatusManager.ValidAndInvalidSegmentsInfo segments = - segmentStatusManager.getValidAndInvalidSegments(); + segmentStatusManager.getValidAndInvalidSegments(loadMetadataDetails); if (getValidateSegmentsToAccess(job.getConfiguration())) { List<Segment> validSegments = segments.getValidSegments(); @@ -435,7 +439,7 @@ public class CarbonTableInputFormat<T> extends FileInputFormat<Void, T> { // do block filtering and get split List<InputSplit> splits = getSplits(job, filterInterface, filteredSegmentToAccess, matchedPartitions, partitionInfo, - null); + null, updateStatusManager); // pass the invalid segment to task side in order to remove index entry in task side if (invalidSegments.size() > 0) { for (InputSplit split : splits) { @@ -616,7 +620,7 @@ public class CarbonTableInputFormat<T> extends FileInputFormat<Void, T> { CarbonInputFormatUtil.resolveFilter(filter, identifier, tableProvider); // do block filtering and get split List<InputSplit> splits = getSplits(job, filterInterface, segmentList, matchedPartitions, - partitionInfo, oldPartitionIdList); + partitionInfo, oldPartitionIdList, new SegmentUpdateStatusManager(identifier)); // pass the invalid segment to task side in order to remove index entry in task side if (invalidSegments.size() > 0) { for (InputSplit split : splits) { @@ -667,7 
+671,8 @@ public class CarbonTableInputFormat<T> extends FileInputFormat<Void, T> { */ private List<InputSplit> getSplits(JobContext job, FilterResolverIntf filterResolver, List<Segment> validSegments, BitSet matchedPartitions, PartitionInfo partitionInfo, - List<Integer> oldPartitionIdList) throws IOException { + List<Integer> oldPartitionIdList, SegmentUpdateStatusManager updateStatusManager) + throws IOException { List<InputSplit> result = new LinkedList<InputSplit>(); UpdateVO invalidBlockVOForSegmentId = null; @@ -675,8 +680,6 @@ public class CarbonTableInputFormat<T> extends FileInputFormat<Void, T> { AbsoluteTableIdentifier absoluteTableIdentifier = getOrCreateCarbonTable(job.getConfiguration()).getAbsoluteTableIdentifier(); - SegmentUpdateStatusManager updateStatusManager = - new SegmentUpdateStatusManager(absoluteTableIdentifier); isIUDTable = (updateStatusManager.getUpdateStatusDetails().length != 0); @@ -930,9 +933,12 @@ public class CarbonTableInputFormat<T> extends FileInputFormat<Void, T> { List<PartitionSpec> partitions) throws IOException { TableDataMap blockletMap = DataMapStoreManager.getInstance() .getDataMap(identifier, BlockletDataMap.NAME, BlockletDataMapFactory.class.getName()); - SegmentUpdateStatusManager updateStatusManager = new SegmentUpdateStatusManager(identifier); + LoadMetadataDetails[] loadMetadataDetails = SegmentStatusManager + .readTableStatusFile(CarbonTablePath.getTableStatusFilePath(identifier.getTablePath())); + SegmentUpdateStatusManager updateStatusManager = + new SegmentUpdateStatusManager(identifier, loadMetadataDetails); SegmentStatusManager.ValidAndInvalidSegmentsInfo allSegments = - new SegmentStatusManager(identifier).getValidAndInvalidSegments(); + new SegmentStatusManager(identifier).getValidAndInvalidSegments(loadMetadataDetails); Map<String, Long> blockRowCountMapping = new HashMap<>(); Map<String, Long> segmentAndBlockCountMapping = new HashMap<>(); 
http://git-wip-us.apache.org/repos/asf/carbondata/blob/c125f0ca/hadoop/src/test/java/org/apache/carbondata/hadoop/test/util/StoreCreator.java ---------------------------------------------------------------------- diff --git a/hadoop/src/test/java/org/apache/carbondata/hadoop/test/util/StoreCreator.java b/hadoop/src/test/java/org/apache/carbondata/hadoop/test/util/StoreCreator.java index fbf33d6..7e58f97 100644 --- a/hadoop/src/test/java/org/apache/carbondata/hadoop/test/util/StoreCreator.java +++ b/hadoop/src/test/java/org/apache/carbondata/hadoop/test/util/StoreCreator.java @@ -74,15 +74,15 @@ import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortIndexWrit import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortIndexWriterImpl; import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortInfo; import org.apache.carbondata.core.writer.sortindex.CarbonDictionarySortInfoPreparator; -import org.apache.carbondata.processing.util.TableOptionConstant; +import org.apache.carbondata.processing.loading.DataLoadExecutor; +import org.apache.carbondata.processing.loading.constants.DataLoadProcessorConstants; import org.apache.carbondata.processing.loading.csvinput.BlockDetails; import org.apache.carbondata.processing.loading.csvinput.CSVInputFormat; import org.apache.carbondata.processing.loading.csvinput.CSVRecordReaderIterator; import org.apache.carbondata.processing.loading.csvinput.StringArrayWritable; import org.apache.carbondata.processing.loading.model.CarbonDataLoadSchema; import org.apache.carbondata.processing.loading.model.CarbonLoadModel; -import org.apache.carbondata.processing.loading.DataLoadExecutor; -import org.apache.carbondata.processing.loading.constants.DataLoadProcessorConstants; +import org.apache.carbondata.processing.util.TableOptionConstant; import com.google.gson.Gson; import org.apache.hadoop.conf.Configuration; @@ -471,7 +471,7 @@ public class StoreCreator { listOfLoadFolderDetails.add(loadMetadataDetails); 
String dataLoadLocation = schema.getCarbonTable().getMetaDataFilepath() + File.separator - + CarbonCommonConstants.LOADMETADATA_FILENAME; + + CarbonTablePath.TABLE_STATUS_FILE; DataOutputStream dataOutputStream; Gson gsonObjectToWrite = new Gson(); http://git-wip-us.apache.org/repos/asf/carbondata/blob/c125f0ca/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala ---------------------------------------------------------------------- diff --git a/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala b/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala index 7b5c311..9d82d42 100644 --- a/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala +++ b/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala @@ -535,7 +535,7 @@ object CarbonDataStoreCreator { loadMetadataDetails.getTimeStamp(readCurrentTime())) listOfLoadFolderDetails.add(loadMetadataDetails) val dataLoadLocation: String = schema.getCarbonTable.getMetaDataFilepath + File.separator + - CarbonCommonConstants.LOADMETADATA_FILENAME + CarbonTablePath.TABLE_STATUS_FILE val gsonObjectToWrite: Gson = new Gson() val writeOperation: AtomicFileOperations = new AtomicFileOperationsImpl( dataLoadLocation, http://git-wip-us.apache.org/repos/asf/carbondata/blob/c125f0ca/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java ---------------------------------------------------------------------- diff --git a/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java b/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java index e662757..e93227d 100644 --- a/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java +++ b/processing/src/test/java/org/apache/carbondata/processing/StoreCreator.java @@ -43,6 +43,7 @@ import 
org.apache.carbondata.core.datastore.impl.FileFactory; import org.apache.carbondata.core.fileoperations.AtomicFileOperations; import org.apache.carbondata.core.fileoperations.AtomicFileOperationsImpl; import org.apache.carbondata.core.fileoperations.FileWriteOperation; +import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier; import org.apache.carbondata.core.metadata.CarbonMetadata; import org.apache.carbondata.core.metadata.CarbonTableIdentifier; import org.apache.carbondata.core.metadata.ColumnIdentifier; @@ -52,7 +53,6 @@ import org.apache.carbondata.core.metadata.datatype.DataTypes; import org.apache.carbondata.core.metadata.encoder.Encoding; import org.apache.carbondata.core.metadata.schema.SchemaEvolution; import org.apache.carbondata.core.metadata.schema.SchemaEvolutionEntry; -import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier; import org.apache.carbondata.core.metadata.schema.table.CarbonTable; import org.apache.carbondata.core.metadata.schema.table.TableInfo; import org.apache.carbondata.core.metadata.schema.table.TableSchema; @@ -446,7 +446,7 @@ public class StoreCreator { listOfLoadFolderDetails.add(loadMetadataDetails); String dataLoadLocation = schema.getCarbonTable().getMetaDataFilepath() + File.separator - + CarbonCommonConstants.LOADMETADATA_FILENAME; + + CarbonTablePath.TABLE_STATUS_FILE; DataOutputStream dataOutputStream; Gson gsonObjectToWrite = new Gson();