GitHub user kumarvishal09 commented on a diff in the pull request:

    https://github.com/apache/carbondata/pull/2819#discussion_r225794608
  
    --- Diff: core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java ---
    @@ -221,49 +229,66 @@ protected DimensionRawColumnChunk getDimensionRawColumnChunk(FileReader fileRead
         int offset = (int) rawColumnPage.getOffSet() + dimensionChunksLength
             .get(rawColumnPage.getColumnIndex()) + dataChunk3.getPage_offset().get(pageNumber);
         // first read the data and uncompressed it
    -    return decodeDimension(rawColumnPage, rawData, pageMetadata, offset);
    +    return decodeDimension(rawColumnPage, rawData, pageMetadata, offset, vectorInfo);
    +  }
    +
    +  @Override
    +  public void decodeColumnPageAndFillVector(DimensionRawColumnChunk dimensionRawColumnChunk,
    +      int pageNumber, ColumnVectorInfo vectorInfo) throws IOException, MemoryException {
    +    DimensionColumnPage columnPage =
    +        decodeColumnPage(dimensionRawColumnChunk, pageNumber, vectorInfo);
    +    columnPage.freeMemory();
       }
     
    -  private ColumnPage decodeDimensionByMeta(DataChunk2 pageMetadata,
    -      ByteBuffer pageData, int offset, boolean isLocalDictEncodedPage)
    +  private ColumnPage decodeDimensionByMeta(DataChunk2 pageMetadata, ByteBuffer pageData, int offset,
    +      boolean isLocalDictEncodedPage, ColumnVectorInfo vectorInfo, BitSet nullBitSet)
           throws IOException, MemoryException {
         List<Encoding> encodings = pageMetadata.getEncoders();
         List<ByteBuffer> encoderMetas = pageMetadata.getEncoder_meta();
         String compressorName = CarbonMetadataUtil.getCompressorNameFromChunkMeta(
             pageMetadata.getChunk_meta());
         ColumnPageDecoder decoder = encodingFactory.createDecoder(encodings, encoderMetas,
    -        compressorName);
    -    return decoder
    -        .decode(pageData.array(), offset, pageMetadata.data_page_length, isLocalDictEncodedPage);
    +        compressorName, vectorInfo != null);
    +    if (vectorInfo != null) {
    +      return decoder
    +          .decodeAndFillVector(pageData.array(), offset, pageMetadata.data_page_length, vectorInfo,
    +              nullBitSet, isLocalDictEncodedPage);
    +    } else {
    +      return decoder
    +          .decode(pageData.array(), offset, pageMetadata.data_page_length, isLocalDictEncodedPage);
    +    }
       }
     
       protected DimensionColumnPage decodeDimension(DimensionRawColumnChunk rawColumnPage,
    -      ByteBuffer pageData, DataChunk2 pageMetadata, int offset)
    +      ByteBuffer pageData, DataChunk2 pageMetadata, int offset, ColumnVectorInfo vectorInfo)
           throws IOException, MemoryException {
         List<Encoding> encodings = pageMetadata.getEncoders();
         if (CarbonUtil.isEncodedWithMeta(encodings)) {
    -      ColumnPage decodedPage = decodeDimensionByMeta(pageMetadata, pageData, offset,
    -          null != rawColumnPage.getLocalDictionary());
    -      decodedPage.setNullBits(QueryUtil.getNullBitSet(pageMetadata.presence, this.compressor));
           int[] invertedIndexes = new int[0];
           int[] invertedIndexesReverse = new int[0];
           // in case of no dictionary measure data types, if it is included in 
sort columns
           // then inverted index to be uncompressed
    +      boolean isExplicitSorted =
    +          CarbonUtil.hasEncoding(pageMetadata.encoders, Encoding.INVERTED_INDEX);
    +      int dataOffset = offset;
           if (encodings.contains(Encoding.INVERTED_INDEX)) {
             offset += pageMetadata.data_page_length;
    -        if (isExplicitSorted) {
    +        if (CarbonUtil.hasEncoding(pageMetadata.encoders, Encoding.INVERTED_INDEX)) {
    +        if (isExplicitSorted) {
    --- End diff ---
    
    This `if` check is not required, as the `if` check above already verifies whether the page is explicitly sorted.
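
    For illustration only, a minimal sketch of the simplification being suggested, using just the names visible in the diff above; the body of the outer branch is elided where the diff does not show it:

        // isExplicitSorted and the outer condition test the same thing:
        // both check for Encoding.INVERTED_INDEX on the page metadata.
        boolean isExplicitSorted =
            CarbonUtil.hasEncoding(pageMetadata.encoders, Encoding.INVERTED_INDEX);
        int dataOffset = offset;
        if (encodings.contains(Encoding.INVERTED_INDEX)) {
          // Inside this branch isExplicitSorted is always true, so the
          // nested if (isExplicitSorted) adds nothing and can be removed.
          offset += pageMetadata.data_page_length;
          // ... inverted index decoding elided in the diff above ...
        }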


---
