carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From kumarvisha...@apache.org
Subject carbondata git commit: [CARBONDATA-1224] Added page level reader instead of reading whole blocklet in V3
Date Tue, 30 Jan 2018 16:42:28 GMT
Repository: carbondata
Updated Branches:
  refs/heads/master 181c280b7 -> d509f17fb


[CARBONDATA-1224] Added page level reader instead of reading whole blocklet in V3

Problem: In V3 format we read the whole blocklet at once into memory in order to save IO time. But this turns out to be costlier in the case of parallel reading of many carbondata files. For example, if we need to compact 50 segments, the compactor needs to open readers on all 50 segments to do the merge sort. The memory consumption is too high if each reader reads a whole blocklet into memory, and there is a high chance of going out of memory. Solution: In this type of scenario we can introduce new readers for V3 that read the data page by page instead of reading the whole blocklet at once, to reduce the memory footprint.

This closes #1089


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/d509f17f
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/d509f17f
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/d509f17f

Branch: refs/heads/master
Commit: d509f17fbbf31b4baef23821f700bcbbfc987001
Parents: 181c280
Author: ravipesala <ravi.pesala@gmail.com>
Authored: Mon Jan 22 23:17:26 2018 +0530
Committer: kumarvishal <kumarvishal.1802@gmail.com>
Committed: Tue Jan 30 22:12:09 2018 +0530

----------------------------------------------------------------------
 .../core/constants/CarbonCommonConstants.java   |  11 ++
 .../carbondata/core/datastore/FileHolder.java   |  14 +-
 .../datastore/chunk/AbstractRawColumnChunk.java |   6 +-
 .../chunk/impl/DimensionRawColumnChunk.java     |  17 +-
 .../chunk/impl/MeasureRawColumnChunk.java       |  18 +-
 .../chunk/reader/CarbonDataReaderFactory.java   |  22 ++-
 ...mpressedDimensionChunkFileBasedReaderV1.java |   2 +-
 ...mpressedDimensionChunkFileBasedReaderV2.java |   2 +-
 ...essedDimChunkFileBasedPageLevelReaderV3.java | 168 +++++++++++++++++++
 ...mpressedDimensionChunkFileBasedReaderV3.java |  36 ++--
 ...CompressedMeasureChunkFileBasedReaderV1.java |   2 +-
 ...CompressedMeasureChunkFileBasedReaderV2.java |   2 +-
 ...CompressedMeasureChunkFileBasedReaderV3.java |  39 ++---
 ...essedMsrChunkFileBasedPageLevelReaderV3.java | 153 +++++++++++++++++
 .../core/datastore/impl/DFSFileHolderImpl.java  |  16 +-
 .../core/datastore/impl/FileHolderImpl.java     |  19 +--
 .../impl/btree/BlockletBTreeLeafNode.java       |  54 +++++-
 .../BlockletDataRefNodeWrapper.java             |  38 +++--
 .../executor/impl/AbstractQueryExecutor.java    |   1 +
 .../scan/executor/infos/BlockExecutionInfo.java |  13 ++
 .../carbondata/core/scan/model/QueryModel.java  |  13 ++
 .../processor/AbstractDataBlockIterator.java    |  25 ++-
 .../core/scan/result/AbstractScannedResult.java |  69 +++++++-
 .../AbstractDetailQueryResultIterator.java      |   1 +
 .../scan/scanner/AbstractBlockletScanner.java   |  38 +++--
 .../core/scan/scanner/impl/FilterScanner.java   |   3 +-
 .../apache/carbondata/core/util/CarbonUtil.java |  17 ++
 .../merger/CarbonCompactionExecutor.java        |  19 +++
 28 files changed, 682 insertions(+), 136 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index cf95dd9..77e8db8 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -1556,6 +1556,17 @@ public final class CarbonCommonConstants {
   // the maximum characters that can be supported should be less than Short max value
   public static final int MAX_CHARS_PER_COLUMN_DEFAULT = 32000;
 
+  /**
+   * Enabling page level reader for compaction reduces the memory usage while compacting more
+   * number of segments. It allows reading only page by page instead of reaing whole blocklet to
+   * memory.
+   */
+  @CarbonProperty
+  public static final String CARBON_ENABLE_PAGE_LEVEL_READER_IN_COMPACTION =
+      "carbon.enable.page.level.reader.in.compaction";
+
+  public static final String CARBON_ENABLE_PAGE_LEVEL_READER_IN_COMPACTION_DEFAULT = "true";
+
   private CarbonCommonConstants() {
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/FileHolder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/FileHolder.java b/core/src/main/java/org/apache/carbondata/core/datastore/FileHolder.java
index 712e116..b4130a0 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/FileHolder.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/FileHolder.java
@@ -17,7 +17,6 @@
 
 package org.apache.carbondata.core.datastore;
 
-import java.io.DataInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
@@ -103,6 +102,17 @@ public interface FileHolder {
 
   String getQueryId();
 
-  DataInputStream getDataInputStream(String filePath, long offset) throws IOException;
+  /**
+   * Set the flag to read data page by page instead of whole blocklet.
+   *
+   * @param isReadPageByPage
+   */
+  void setReadPageByPage(boolean isReadPageByPage);
+
+  /**
+   * Whether read the data page by page from carbondata file instead of reading whole
+   * blocklet to memory. It is need in case of memory constraint operations.
+   */
+  boolean isReadPageByPage();
 
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/AbstractRawColumnChunk.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/AbstractRawColumnChunk.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/AbstractRawColumnChunk.java
index d1362c2..05ac9ff 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/AbstractRawColumnChunk.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/AbstractRawColumnChunk.java
@@ -39,13 +39,13 @@ public abstract class AbstractRawColumnChunk {
 
   protected int columnIndex;
 
-  private int offSet;
+  private long offSet;
 
   protected int length;
 
   private DataChunk3 dataChunkV3;
 
-  public AbstractRawColumnChunk(int columnIndex, ByteBuffer rawData, int offSet, int length) {
+  public AbstractRawColumnChunk(int columnIndex, ByteBuffer rawData, long offSet, int length) {
     this.columnIndex = columnIndex;
     this.rawData = rawData;
     this.offSet = offSet;
@@ -102,7 +102,7 @@ public abstract class AbstractRawColumnChunk {
     return columnIndex;
   }
 
-  public int getOffSet() {
+  public long getOffSet() {
     return offSet;
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/DimensionRawColumnChunk.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/DimensionRawColumnChunk.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/DimensionRawColumnChunk.java
index cb112c1..705c13c 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/DimensionRawColumnChunk.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/DimensionRawColumnChunk.java
@@ -39,7 +39,7 @@ public class DimensionRawColumnChunk extends AbstractRawColumnChunk {
 
   private FileHolder fileHolder;
 
-  public DimensionRawColumnChunk(int columnIndex, ByteBuffer rawData, int offSet, int length,
+  public DimensionRawColumnChunk(int columnIndex, ByteBuffer rawData, long offSet, int length,
       DimensionColumnChunkReader columnChunkReader) {
     super(columnIndex, rawData, offSet, length);
     this.chunkReader = columnChunkReader;
@@ -86,6 +86,21 @@ public class DimensionRawColumnChunk extends AbstractRawColumnChunk {
     return dataChunks[index];
   }
 
+  /**
+   * Convert raw data with specified page number processed to DimensionColumnDataChunk
+   *
+   * @param index
+   * @return
+   */
+  public DimensionColumnDataChunk convertToDimColDataChunkWithOutCache(int index) {
+    assert index < pagesCount;
+    try {
+      return chunkReader.convertToDimensionChunk(this, index);
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+  }
+
   @Override public void freeMemory() {
     if (null != dataChunks) {
       for (int i = 0; i < dataChunks.length; i++) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/MeasureRawColumnChunk.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/MeasureRawColumnChunk.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/MeasureRawColumnChunk.java
index d41cf09..fa0777b 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/MeasureRawColumnChunk.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/impl/MeasureRawColumnChunk.java
@@ -39,7 +39,7 @@ public class MeasureRawColumnChunk extends AbstractRawColumnChunk {
 
   private FileHolder fileReader;
 
-  public MeasureRawColumnChunk(int columnIndex, ByteBuffer rawData, int offSet, int length,
+  public MeasureRawColumnChunk(int columnIndex, ByteBuffer rawData, long offSet, int length,
       MeasureColumnChunkReader chunkReader) {
     super(columnIndex, rawData, offSet, length);
     this.chunkReader = chunkReader;
@@ -85,6 +85,22 @@ public class MeasureRawColumnChunk extends AbstractRawColumnChunk {
     return columnPages[columnIndex];
   }
 
+  /**
+   * Convert raw data with specified page number processed to MeasureColumnDataChunk
+   *
+   * @param index
+   * @return
+   */
+  public ColumnPage convertToColumnPageWithOutCache(int index) {
+    assert index < pagesCount;
+
+    try {
+      return chunkReader.convertToColumnPage(this, index);
+    } catch (IOException | MemoryException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
   @Override public void freeMemory() {
     if (null != columnPages) {
       for (int i = 0; i < columnPages.length; i++) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/CarbonDataReaderFactory.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/CarbonDataReaderFactory.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/CarbonDataReaderFactory.java
index 8fee760..07cd7b4 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/CarbonDataReaderFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/CarbonDataReaderFactory.java
@@ -18,10 +18,12 @@ package org.apache.carbondata.core.datastore.chunk.reader;
 
 import org.apache.carbondata.core.datastore.chunk.reader.dimension.v1.CompressedDimensionChunkFileBasedReaderV1;
 import org.apache.carbondata.core.datastore.chunk.reader.dimension.v2.CompressedDimensionChunkFileBasedReaderV2;
+import org.apache.carbondata.core.datastore.chunk.reader.dimension.v3.CompressedDimChunkFileBasedPageLevelReaderV3;
 import org.apache.carbondata.core.datastore.chunk.reader.dimension.v3.CompressedDimensionChunkFileBasedReaderV3;
 import org.apache.carbondata.core.datastore.chunk.reader.measure.v1.CompressedMeasureChunkFileBasedReaderV1;
 import org.apache.carbondata.core.datastore.chunk.reader.measure.v2.CompressedMeasureChunkFileBasedReaderV2;
 import org.apache.carbondata.core.datastore.chunk.reader.measure.v3.CompressedMeasureChunkFileBasedReaderV3;
+import org.apache.carbondata.core.datastore.chunk.reader.measure.v3.CompressedMsrChunkFileBasedPageLevelReaderV3;
 import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
 import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
 
@@ -62,7 +64,8 @@ public class CarbonDataReaderFactory {
    * @return dimension column data reader based on version number
    */
   public DimensionColumnChunkReader getDimensionColumnChunkReader(ColumnarFormatVersion version,
-      BlockletInfo blockletInfo, int[] eachColumnValueSize, String filePath) {
+      BlockletInfo blockletInfo, int[] eachColumnValueSize, String filePath,
+      boolean readPagebyPage) {
     switch (version) {
       case V1:
         return new CompressedDimensionChunkFileBasedReaderV1(blockletInfo, eachColumnValueSize,
@@ -72,8 +75,13 @@ public class CarbonDataReaderFactory {
             filePath);
       case V3:
       default:
-        return new CompressedDimensionChunkFileBasedReaderV3(blockletInfo, eachColumnValueSize,
-            filePath);
+        if (readPagebyPage) {
+          return new CompressedDimChunkFileBasedPageLevelReaderV3(blockletInfo, eachColumnValueSize,
+              filePath);
+        } else {
+          return new CompressedDimensionChunkFileBasedReaderV3(blockletInfo, eachColumnValueSize,
+              filePath);
+        }
     }
   }
 
@@ -86,7 +94,7 @@ public class CarbonDataReaderFactory {
    * @return measure column data reader based on version number
    */
   public MeasureColumnChunkReader getMeasureColumnChunkReader(ColumnarFormatVersion version,
-      BlockletInfo blockletInfo, String filePath) {
+      BlockletInfo blockletInfo, String filePath, boolean readPagebyPage) {
     switch (version) {
       case V1:
         return new CompressedMeasureChunkFileBasedReaderV1(blockletInfo, filePath);
@@ -94,7 +102,11 @@ public class CarbonDataReaderFactory {
         return new CompressedMeasureChunkFileBasedReaderV2(blockletInfo, filePath);
       case V3:
       default:
-        return new CompressedMeasureChunkFileBasedReaderV3(blockletInfo, filePath);
+        if (readPagebyPage) {
+          return new CompressedMsrChunkFileBasedPageLevelReaderV3(blockletInfo, filePath);
+        } else {
+          return new CompressedMeasureChunkFileBasedReaderV3(blockletInfo, filePath);
+        }
 
     }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
index 3e45082..27a4d89 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
@@ -108,7 +108,7 @@ public class CompressedDimensionChunkFileBasedReaderV1 extends AbstractChunkRead
     FileHolder fileReader = dimensionRawColumnChunk.getFileReader();
 
     ByteBuffer rawData = dimensionRawColumnChunk.getRawData();
-    dataPage = COMPRESSOR.unCompressByte(rawData.array(), dimensionRawColumnChunk.getOffSet(),
+    dataPage = COMPRESSOR.unCompressByte(rawData.array(), (int) dimensionRawColumnChunk.getOffSet(),
         dimensionRawColumnChunk.getLength());
 
     // if row id block is present then read the row id chunk and uncompress it

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
index 0dea099..b43f89c 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
@@ -122,7 +122,7 @@ public class CompressedDimensionChunkFileBasedReaderV2 extends AbstractChunkRead
     int[] invertedIndexesReverse = null;
     int[] rlePage = null;
     DataChunk2 dimensionColumnChunk = null;
-    int copySourcePoint = dimensionRawColumnChunk.getOffSet();
+    int copySourcePoint = (int) dimensionRawColumnChunk.getOffSet();
     int blockIndex = dimensionRawColumnChunk.getColumnIndex();
     ByteBuffer rawData = dimensionRawColumnChunk.getRawData();
     if (dimensionChunksOffset.size() - 1 == blockIndex) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimChunkFileBasedPageLevelReaderV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimChunkFileBasedPageLevelReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimChunkFileBasedPageLevelReaderV3.java
new file mode 100644
index 0000000..1edfd09
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimChunkFileBasedPageLevelReaderV3.java
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.carbondata.core.datastore.chunk.reader.dimension.v3;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.carbondata.core.datastore.FileHolder;
+import org.apache.carbondata.core.datastore.chunk.DimensionColumnDataChunk;
+import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk;
+import org.apache.carbondata.core.memory.MemoryException;
+import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
+import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.format.DataChunk2;
+import org.apache.carbondata.format.DataChunk3;
+import org.apache.carbondata.format.Encoding;
+
+/**
+ * Dimension column V3 Reader class which will be used to read and uncompress
+ * V3 format data. It reads the data in each page at once unlike whole blocklet. It is
+ * used for memory constraint operations like compaction.
+ * data format
+ * Data Format
+ * <FileHeader>
+ * <Column1 Data ChunkV3><Column1<Page1><Page2><Page3><Page4>>
+ * <Column2 Data ChunkV3><Column2<Page1><Page2><Page3><Page4>>
+ * <Column3 Data ChunkV3><Column3<Page1><Page2><Page3><Page4>>
+ * <Column4 Data ChunkV3><Column4<Page1><Page2><Page3><Page4>>
+ * <File Footer>
+ */
+public class CompressedDimChunkFileBasedPageLevelReaderV3
+    extends CompressedDimensionChunkFileBasedReaderV3 {
+
+  /**
+   * end position of last dimension in carbon data file
+   */
+  private long lastDimensionOffsets;
+
+  public CompressedDimChunkFileBasedPageLevelReaderV3(BlockletInfo blockletInfo,
+      int[] eachColumnValueSize, String filePath) {
+    super(blockletInfo, eachColumnValueSize, filePath);
+    lastDimensionOffsets = blockletInfo.getDimensionOffset();
+  }
+
+  /**
+   * Below method will be used to read the dimension column data form carbon data file
+   * Steps for reading
+   * 1. Get the length of the data to be read
+   * 2. Allocate the direct buffer
+   * 3. read the data from file
+   * 4. Get the data chunk object from data read
+   * 5. Create the raw chunk object and fill the details
+   *
+   * @param fileReader          reader for reading the column from carbon data file
+   * @param blockletColumnIndex blocklet index of the column in carbon data file
+   * @return dimension raw chunk
+   */
+  public DimensionRawColumnChunk readRawDimensionChunk(FileHolder fileReader,
+      int blockletColumnIndex) throws IOException {
+    // get the current dimension offset
+    long currentDimensionOffset = dimensionChunksOffset.get(blockletColumnIndex);
+    int length = 0;
+    // to calculate the length of the data to be read
+    // column other than last column we can subtract the offset of current column with
+    // next column and get the total length.
+    // but for last column we need to use lastDimensionOffset which is the end position
+    // of the last dimension, we can subtract current dimension offset from lastDimesionOffset
+    if (dimensionChunksOffset.size() - 1 == blockletColumnIndex) {
+      length = (int) (lastDimensionOffsets - currentDimensionOffset);
+    } else {
+      length = (int) (dimensionChunksOffset.get(blockletColumnIndex + 1) - currentDimensionOffset);
+    }
+    ByteBuffer buffer;
+    // read the data from carbon data file
+    synchronized (fileReader) {
+      buffer = fileReader.readByteBuffer(filePath, currentDimensionOffset,
+          dimensionChunksLength.get(blockletColumnIndex));
+    }
+    // get the data chunk which will have all the details about the data pages
+    DataChunk3 dataChunk = CarbonUtil.readDataChunk3(new ByteArrayInputStream(buffer.array()));
+    DimensionRawColumnChunk rawColumnChunk =
+        getDimensionRawColumnChunk(fileReader, blockletColumnIndex, currentDimensionOffset, length,
+            null, dataChunk);
+
+    return rawColumnChunk;
+  }
+
+  /**
+   * Below method will be used to read the multiple dimension column data in group
+   * and divide into dimension raw chunk object
+   * Steps for reading
+   * 1. Get the length of the data to be read
+   * 2. Allocate the direct buffer
+   * 3. read the data from file
+   * 4. Get the data chunk object from file for each column
+   * 5. Create the raw chunk object and fill the details for each column
+   * 6. increment the offset of the data
+   *
+   * @param fileReader      reader which will be used to read the dimension columns data from file
+   * @param startBlockletColumnIndex blocklet index of the first dimension column
+   * @param endBlockletColumnIndex   blocklet index of the last dimension column
+   * @ DimensionRawColumnChunk array
+   */
+  protected DimensionRawColumnChunk[] readRawDimensionChunksInGroup(FileHolder fileReader,
+      int startBlockletColumnIndex, int endBlockletColumnIndex) throws IOException {
+    // create raw chunk for each dimension column
+    DimensionRawColumnChunk[] dimensionDataChunks =
+        new DimensionRawColumnChunk[endBlockletColumnIndex - startBlockletColumnIndex + 1];
+    int index = 0;
+    for (int i = startBlockletColumnIndex; i <= endBlockletColumnIndex; i++) {
+      dimensionDataChunks[index] = readRawDimensionChunk(fileReader, i);
+      index++;
+    }
+    return dimensionDataChunks;
+  }
+
+  /**
+   * Below method will be used to convert the compressed dimension chunk raw data to actual data
+   *
+   * @param dimensionRawColumnChunk dimension raw chunk
+   * @param pageNumber              number
+   * @return DimensionColumnDataChunk
+   */
+  @Override public DimensionColumnDataChunk convertToDimensionChunk(
+      DimensionRawColumnChunk dimensionRawColumnChunk, int pageNumber)
+      throws IOException, MemoryException {
+    // data chunk of page
+    DataChunk2 pageMetadata = null;
+    // data chunk of blocklet column
+    DataChunk3 dataChunk3 = dimensionRawColumnChunk.getDataChunkV3();
+
+    pageMetadata = dataChunk3.getData_chunk_list().get(pageNumber);
+    // calculating the start point of data
+    // as buffer can contain multiple column data, start point will be datachunkoffset +
+    // data chunk length + page offset
+    long offset = dimensionRawColumnChunk.getOffSet() + dimensionChunksLength
+        .get(dimensionRawColumnChunk.getColumnIndex()) + dataChunk3.getPage_offset()
+        .get(pageNumber);
+    int length = pageMetadata.data_page_length;
+    if (hasEncoding(pageMetadata.encoders, Encoding.INVERTED_INDEX)) {
+      length += pageMetadata.rowid_page_length;
+    }
+
+    if (hasEncoding(pageMetadata.encoders, Encoding.RLE)) {
+      length += pageMetadata.rle_page_length;
+    }
+    // get the data buffer
+    ByteBuffer rawData = dimensionRawColumnChunk.getFileReader()
+        .readByteBuffer(filePath, offset, length);
+
+    return decodeDimension(dimensionRawColumnChunk, rawData, pageMetadata, 0);
+  }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
index bb828a6..566e9b7 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/dimension/v3/CompressedDimensionChunkFileBasedReaderV3.java
@@ -103,9 +103,15 @@ public class CompressedDimensionChunkFileBasedReaderV3 extends AbstractChunkRead
     }
     // get the data chunk which will have all the details about the data pages
     DataChunk3 dataChunk = CarbonUtil.readDataChunk3(buffer, 0, length);
+    return getDimensionRawColumnChunk(fileReader, blockletColumnIndex, 0, length, buffer,
+        dataChunk);
+  }
+
+  protected DimensionRawColumnChunk getDimensionRawColumnChunk(FileHolder fileReader,
+      int blockletColumnIndex, long offset, int length, ByteBuffer buffer, DataChunk3 dataChunk) {
     // creating a raw chunks instance and filling all the details
     DimensionRawColumnChunk rawColumnChunk =
-        new DimensionRawColumnChunk(blockletColumnIndex, buffer, 0, length, this);
+        new DimensionRawColumnChunk(blockletColumnIndex, buffer, offset, length, this);
     int numberOfPages = dataChunk.getPage_length().size();
     byte[][] maxValueOfEachPage = new byte[numberOfPages][];
     byte[][] minValueOfEachPage = new byte[numberOfPages][];
@@ -166,29 +172,11 @@ public class CompressedDimensionChunkFileBasedReaderV3 extends AbstractChunkRead
     int runningLength = 0;
     for (int i = startBlockletColumnIndex; i <= endBlockletColumnIndex; i++) {
       int currentLength = (int) (dimensionChunksOffset.get(i + 1) - dimensionChunksOffset.get(i));
-      dimensionDataChunks[index] =
-          new DimensionRawColumnChunk(i, buffer, runningLength, currentLength, this);
       DataChunk3 dataChunk =
           CarbonUtil.readDataChunk3(buffer, runningLength, dimensionChunksLength.get(i));
-      int numberOfPages = dataChunk.getPage_length().size();
-      byte[][] maxValueOfEachPage = new byte[numberOfPages][];
-      byte[][] minValueOfEachPage = new byte[numberOfPages][];
-      int[] eachPageLength = new int[numberOfPages];
-      for (int j = 0; j < minValueOfEachPage.length; j++) {
-        maxValueOfEachPage[j] =
-            dataChunk.getData_chunk_list().get(j).getMin_max().getMax_values().get(0).array();
-        minValueOfEachPage[j] =
-            dataChunk.getData_chunk_list().get(j).getMin_max().getMin_values().get(0).array();
-        eachPageLength[j] = dataChunk.getData_chunk_list().get(j).getNumberOfRowsInpage();
-      }
-      dimensionDataChunks[index].setDataChunkV3(dataChunk);
-      dimensionDataChunks[index].setFileHolder(fileReader);
-      dimensionDataChunks[index].setPagesCount(dataChunk.getPage_length().size());
-      dimensionDataChunks[index].setMaxValues(maxValueOfEachPage);
-      dimensionDataChunks[index].setMinValues(minValueOfEachPage);
-      dimensionDataChunks[index].setRowCount(eachPageLength);
-      dimensionDataChunks[index].setOffsets(ArrayUtils
-          .toPrimitive(dataChunk.page_offset.toArray(new Integer[dataChunk.page_offset.size()])));
+      dimensionDataChunks[index] =
+          getDimensionRawColumnChunk(fileReader, i, runningLength, currentLength, buffer,
+              dataChunk);
       runningLength += currentLength;
       index++;
     }
@@ -212,7 +200,7 @@ public class CompressedDimensionChunkFileBasedReaderV3 extends AbstractChunkRead
     // calculating the start point of data
     // as buffer can contain multiple column data, start point will be datachunkoffset +
     // data chunk length + page offset
-    int offset = rawColumnPage.getOffSet() + dimensionChunksLength
+    int offset = (int) rawColumnPage.getOffSet() + dimensionChunksLength
         .get(rawColumnPage.getColumnIndex()) + dataChunk3.getPage_offset().get(pageNumber);
     // first read the data and uncompressed it
     return decodeDimension(rawColumnPage, rawData, pageMetadata, offset);
@@ -240,7 +228,7 @@ public class CompressedDimensionChunkFileBasedReaderV3 extends AbstractChunkRead
     return false;
   }
 
-  private DimensionColumnDataChunk decodeDimension(DimensionRawColumnChunk rawColumnPage,
+  protected DimensionColumnDataChunk decodeDimension(DimensionRawColumnChunk rawColumnPage,
       ByteBuffer pageData, DataChunk2 pageMetadata, int offset)
       throws IOException, MemoryException {
     if (isEncodedWithMeta(pageMetadata)) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
index fcfd862..ae55375 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
@@ -98,7 +98,7 @@ public class CompressedMeasureChunkFileBasedReaderV1 extends AbstractMeasureChun
     ValueEncoderMeta meta = dataChunk.getValueEncoderMeta().get(0);
     ColumnPageDecoder codec = encodingFactory.createDecoderLegacy(meta);
     ColumnPage decodedPage = codec.decode(measureRawColumnChunk.getRawData().array(),
-        measureRawColumnChunk.getOffSet(), dataChunk.getDataPageLength());
+        (int) measureRawColumnChunk.getOffSet(), dataChunk.getDataPageLength());
     decodedPage.setNullBits(dataChunk.getNullValueIndexForColumn());
 
     return decodedPage;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
index 001c240..2ddc202 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v2/CompressedMeasureChunkFileBasedReaderV2.java
@@ -110,7 +110,7 @@ public class CompressedMeasureChunkFileBasedReaderV2 extends AbstractMeasureChun
 
   public ColumnPage convertToColumnPage(MeasureRawColumnChunk measureRawColumnChunk,
       int pageNumber) throws IOException, MemoryException {
-    int copyPoint = measureRawColumnChunk.getOffSet();
+    int copyPoint = (int) measureRawColumnChunk.getOffSet();
     int blockIndex = measureRawColumnChunk.getColumnIndex();
     ByteBuffer rawData = measureRawColumnChunk.getRawData();
     DataChunk2 measureColumnChunk = CarbonUtil.readDataChunk(rawData, copyPoint,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
index e207c82..4f6987b 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMeasureChunkFileBasedReaderV3.java
@@ -94,9 +94,17 @@ public class CompressedMeasureChunkFileBasedReaderV3 extends AbstractMeasureChun
     // get the data chunk which will have all the details about the data pages
     DataChunk3 dataChunk =
         CarbonUtil.readDataChunk3(buffer, 0, measureColumnChunkLength.get(columnIndex));
+
+    return getMeasureRawColumnChunk(fileReader, columnIndex, 0,  dataLength, buffer,
+        dataChunk);
+  }
+
+  protected MeasureRawColumnChunk getMeasureRawColumnChunk(FileHolder fileReader,
+      int columnIndex, long offset, int dataLength, ByteBuffer buffer,
+      DataChunk3 dataChunk) {
     // creating a raw chunks instance and filling all the details
     MeasureRawColumnChunk rawColumnChunk =
-        new MeasureRawColumnChunk(columnIndex, buffer, 0, dataLength, this);
+        new MeasureRawColumnChunk(columnIndex, buffer, offset, dataLength, this);
     int numberOfPages = dataChunk.getPage_length().size();
     byte[][] maxValueOfEachPage = new byte[numberOfPages][];
     byte[][] minValueOfEachPage = new byte[numberOfPages][];
@@ -158,31 +166,10 @@ public class CompressedMeasureChunkFileBasedReaderV3 extends AbstractMeasureChun
     for (int i = startColumnBlockletIndex; i <= endColumnBlockletIndex; i++) {
       int currentLength =
           (int) (measureColumnChunkOffsets.get(i + 1) - measureColumnChunkOffsets.get(i));
-      MeasureRawColumnChunk measureRawColumnChunk =
-          new MeasureRawColumnChunk(i, buffer, runningLength, currentLength, this);
       DataChunk3 dataChunk =
           CarbonUtil.readDataChunk3(buffer, runningLength, measureColumnChunkLength.get(i));
-
-      int numberOfPages = dataChunk.getPage_length().size();
-      byte[][] maxValueOfEachPage = new byte[numberOfPages][];
-      byte[][] minValueOfEachPage = new byte[numberOfPages][];
-      int[] eachPageLength = new int[numberOfPages];
-      for (int j = 0; j < minValueOfEachPage.length; j++) {
-        maxValueOfEachPage[j] =
-            dataChunk.getData_chunk_list().get(j).getMin_max().getMax_values().get(0).array();
-        minValueOfEachPage[j] =
-            dataChunk.getData_chunk_list().get(j).getMin_max().getMin_values().get(0).array();
-        eachPageLength[j] = dataChunk.getData_chunk_list().get(j).getNumberOfRowsInpage();
-      }
-      measureRawColumnChunk.setDataChunkV3(dataChunk);
-      ;
-      measureRawColumnChunk.setFileReader(fileReader);
-      measureRawColumnChunk.setPagesCount(dataChunk.getPage_length().size());
-      measureRawColumnChunk.setMaxValues(maxValueOfEachPage);
-      measureRawColumnChunk.setMinValues(minValueOfEachPage);
-      measureRawColumnChunk.setRowCount(eachPageLength);
-      measureRawColumnChunk.setOffsets(ArrayUtils
-          .toPrimitive(dataChunk.page_offset.toArray(new Integer[dataChunk.page_offset.size()])));
+      MeasureRawColumnChunk measureRawColumnChunk =
+          getMeasureRawColumnChunk(fileReader, i, runningLength, currentLength, buffer, dataChunk);
       measureDataChunk[index] = measureRawColumnChunk;
       runningLength += currentLength;
       index++;
@@ -208,7 +195,7 @@ public class CompressedMeasureChunkFileBasedReaderV3 extends AbstractMeasureChun
     // calculating the start point of data
     // as buffer can contain multiple column data, start point will be datachunkoffset +
     // data chunk length + page offset
-    int offset = rawColumnPage.getOffSet() +
+    int offset = (int) rawColumnPage.getOffSet() +
         measureColumnChunkLength.get(rawColumnPage.getColumnIndex()) +
         dataChunk3.getPage_offset().get(pageNumber);
     ColumnPage decodedPage = decodeMeasure(pageMetadata, rawColumnPage.getRawData(), offset);
@@ -219,7 +206,7 @@ public class CompressedMeasureChunkFileBasedReaderV3 extends AbstractMeasureChun
   /**
    * Decode measure column page with page header and raw data starting from offset
    */
-  private ColumnPage decodeMeasure(DataChunk2 pageMetadata, ByteBuffer pageData, int offset)
+  protected ColumnPage decodeMeasure(DataChunk2 pageMetadata, ByteBuffer pageData, int offset)
       throws MemoryException, IOException {
     List<Encoding> encodings = pageMetadata.getEncoders();
     List<ByteBuffer> encoderMetas = pageMetadata.getEncoder_meta();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMsrChunkFileBasedPageLevelReaderV3.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMsrChunkFileBasedPageLevelReaderV3.java b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMsrChunkFileBasedPageLevelReaderV3.java
new file mode 100644
index 0000000..31ff4c0
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/chunk/reader/measure/v3/CompressedMsrChunkFileBasedPageLevelReaderV3.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.carbondata.core.datastore.chunk.reader.measure.v3;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+
+import org.apache.carbondata.core.datastore.FileHolder;
+import org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk;
+import org.apache.carbondata.core.datastore.page.ColumnPage;
+import org.apache.carbondata.core.memory.MemoryException;
+import org.apache.carbondata.core.metadata.blocklet.BlockletInfo;
+import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.format.DataChunk2;
+import org.apache.carbondata.format.DataChunk3;
+
+/**
+ * Measure column V3 reader which reads and uncompresses V3 format data
+ * page by page instead of loading the whole blocklet into memory at once,
+ * reducing the memory footprint when many readers are open in parallel.
+ * Data Format
+ * <FileHeader>
+ * <Column1 Data ChunkV3><Column1<Page1><Page2><Page3><Page4>>
+ * <Column2 Data ChunkV3><Column2<Page1><Page2><Page3><Page4>>
+ * <Column3 Data ChunkV3><Column3<Page1><Page2><Page3><Page4>>
+ * <Column4 Data ChunkV3><Column4<Page1><Page2><Page3><Page4>>
+ * <File Footer>
+ */
+public class CompressedMsrChunkFileBasedPageLevelReaderV3
+    extends CompressedMeasureChunkFileBasedReaderV3 {
+
+  /**
+   * end position (file offset) of the last measure column in the carbon data file;
+   * used to compute the data length of the last column, which has no successor offset
+   */
+  private long measureOffsets;
+
+  public CompressedMsrChunkFileBasedPageLevelReaderV3(BlockletInfo blockletInfo, String filePath) {
+    super(blockletInfo, filePath);
+    measureOffsets = blockletInfo.getMeasureOffsets();
+  }
+
+  /**
+   * Below method will be used to read the measure column data form carbon data file
+   * 1. Get the length of the data to be read
+   * 2. Read only the chunk metadata (DataChunk3) from the file; page data is
+   *    NOT read here — it is fetched on demand in convertToColumnPage
+   * 3. Create the raw chunk object and fill the details
+   *
+   * @param fileReader          reader for reading the column from carbon data file
+   * @param blockletColumnIndex blocklet index of the column in carbon data file
+   * @return measure raw chunk
+   */
+  @Override public MeasureRawColumnChunk readRawMeasureChunk(FileHolder fileReader,
+      int blockletColumnIndex) throws IOException {
+    int dataLength = 0;
+    // to calculate the length of the data to be read
+    // for a column other than the last one we can subtract the offset of the
+    // current column from the offset of the next column to get the total length.
+    // for the last column we need to use measureOffsets, which is the end
+    // position of the last measure column, and subtract the current offset from it
+    if (measureColumnChunkOffsets.size() - 1 == blockletColumnIndex) {
+      dataLength = (int) (measureOffsets - measureColumnChunkOffsets.get(blockletColumnIndex));
+    } else {
+      dataLength =
+          (int) (measureColumnChunkOffsets.get(blockletColumnIndex + 1) - measureColumnChunkOffsets
+              .get(blockletColumnIndex));
+    }
+    ByteBuffer buffer;
+    // read only the chunk header from the carbon data file; the read is
+    // synchronized on the reader (presumably it can be shared across
+    // threads — confirm with callers)
+    synchronized (fileReader) {
+      buffer = fileReader
+          .readByteBuffer(filePath, measureColumnChunkOffsets.get(blockletColumnIndex),
+              measureColumnChunkLength.get(blockletColumnIndex));
+    }
+    // get the data chunk which will have all the details about the data pages
+    DataChunk3 dataChunk = CarbonUtil.readDataChunk3(new ByteArrayInputStream(buffer.array()));
+    // raw data buffer is passed as null: each page is read from the file on
+    // demand in convertToColumnPage instead of being held in memory here
+    return getMeasureRawColumnChunk(fileReader, blockletColumnIndex,
+        measureColumnChunkOffsets.get(blockletColumnIndex), dataLength, null, dataChunk);
+  }
+
+  /**
+   * Below method will be used to read multiple measure columns. Unlike the
+   * blocklet-level reader, this page-level variant does not read the group as
+   * one buffer: it simply delegates to readRawMeasureChunk for each column,
+   * so only chunk metadata (never page data) is held per column.
+   *
+   * @param fileReader         reader which will be used to read the measure columns data from file
+   * @param startColumnBlockletIndex blocklet index of the first measure column
+   * @param endColumnBlockletIndex   blocklet index of the last measure column
+   * @return MeasureRawColumnChunk array
+   */
+  protected MeasureRawColumnChunk[] readRawMeasureChunks(FileHolder fileReader,
+      int startColumnBlockletIndex, int endColumnBlockletIndex) throws IOException {
+    // create raw chunk for each measure column
+    MeasureRawColumnChunk[] measureDataChunk =
+        new MeasureRawColumnChunk[endColumnBlockletIndex - startColumnBlockletIndex + 1];
+    int index = 0;
+    for (int i = startColumnBlockletIndex; i <= endColumnBlockletIndex; i++) {
+      measureDataChunk[index] = readRawMeasureChunk(fileReader, i);
+      index++;
+    }
+    return measureDataChunk;
+  }
+
+  /**
+   * Below method will be used to convert the compressed measure chunk raw data to actual data.
+   * This is where the page data is actually read from the file (page by page),
+   * rather than sliced out of a pre-loaded blocklet buffer.
+   *
+   * @param rawColumnPage measure raw chunk
+   * @param pageNumber    index of the page to decode within the chunk
+   * @return decoded measure ColumnPage
+   */
+  @Override public ColumnPage convertToColumnPage(
+      MeasureRawColumnChunk rawColumnPage, int pageNumber)
+      throws IOException, MemoryException {
+    // data chunk of blocklet column
+    DataChunk3 dataChunk3 = rawColumnPage.getDataChunkV3();
+    // data chunk of page
+    DataChunk2 pageMetadata = dataChunk3.getData_chunk_list().get(pageNumber);
+    // calculating the start point of data
+    // as buffer can contain multiple column data, start point will be datachunkoffset +
+    // data chunk length + page offset
+    long offset = rawColumnPage.getOffSet() + measureColumnChunkLength
+        .get(rawColumnPage.getColumnIndex()) + dataChunk3.getPage_offset().get(pageNumber);
+    ByteBuffer buffer = rawColumnPage.getFileReader()
+        .readByteBuffer(filePath, offset, pageMetadata.data_page_length);
+
+    // the buffer starts exactly at the page data, so decode from offset 0
+    ColumnPage decodedPage = decodeMeasure(pageMetadata, buffer, 0);
+    decodedPage.setNullBits(getNullBitSet(pageMetadata.presence));
+    return decodedPage;
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/impl/DFSFileHolderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/DFSFileHolderImpl.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/DFSFileHolderImpl.java
index 3032ec2..182c8eb 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/DFSFileHolderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/DFSFileHolderImpl.java
@@ -16,8 +16,6 @@
  */
 package org.apache.carbondata.core.datastore.impl;
 
-import java.io.BufferedInputStream;
-import java.io.DataInputStream;
 import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.HashMap;
@@ -39,6 +37,8 @@ public class DFSFileHolderImpl implements FileHolder {
 
   private String queryId;
 
+  private boolean readPageByPage;
+
 
   public DFSFileHolderImpl() {
     this.fileNameAndStreamCache =
@@ -151,12 +151,14 @@ public class DFSFileHolderImpl implements FileHolder {
     return queryId;
   }
 
-  @Override public DataInputStream getDataInputStream(String filePath, long offset)
-      throws IOException {
-    FSDataInputStream fsDataInputStream = updateCache(filePath);
-    fsDataInputStream.seek(offset);
-    return new DataInputStream(new BufferedInputStream(fsDataInputStream, 1 * 1024 * 1024));
+  @Override public void setReadPageByPage(boolean isReadPageByPage) {
+    this.readPageByPage = isReadPageByPage;
   }
+
+  @Override public boolean isReadPageByPage() {
+    return readPageByPage;
+  }
+
   public Map<String, FSDataInputStream> getFileNameAndStreamCache() {
     return fileNameAndStreamCache;
   }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileHolderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileHolderImpl.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileHolderImpl.java
index c867f75..cc589b7 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileHolderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileHolderImpl.java
@@ -17,8 +17,6 @@
 
 package org.apache.carbondata.core.datastore.impl;
 
-import java.io.BufferedInputStream;
-import java.io.DataInputStream;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -38,6 +36,8 @@ public class FileHolderImpl implements FileHolder {
   private Map<String, FileChannel> fileNameAndStreamCache;
   private String queryId;
 
+  private boolean readPageByPage;
+
   /**
    * FileHolderImpl Constructor
    * It will create the cache
@@ -214,16 +214,11 @@ public class FileHolderImpl implements FileHolder {
     return queryId;
   }
 
-  @Override public DataInputStream getDataInputStream(String filePath, long offset)
-      throws IOException {
-    FileInputStream stream = new FileInputStream(filePath);
-    long skipped = stream.skip(offset);
-    long toSkip = offset - skipped;
-    while (toSkip <= 0) {
-      skipped = stream.skip(toSkip);
-      toSkip = toSkip - skipped;
-    }
+  @Override public void setReadPageByPage(boolean isReadPageByPage) {
+    this.readPageByPage = isReadPageByPage;
+  }
 
-    return new DataInputStream(new BufferedInputStream(stream));
+  @Override public boolean isReadPageByPage() {
+    return readPageByPage;
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BlockletBTreeLeafNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BlockletBTreeLeafNode.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BlockletBTreeLeafNode.java
index 82c4169..94221ba 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BlockletBTreeLeafNode.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/btree/BlockletBTreeLeafNode.java
@@ -44,6 +44,16 @@ public class BlockletBTreeLeafNode extends AbstractBTreeLeafNode {
   private MeasureColumnChunkReader measureColumnChunkReader;
 
   /**
+   * reader for dimension chunk of page level
+   */
+  private DimensionColumnChunkReader dimensionChunksPageLevelReader;
+
+  /**
+   * reader of measure chunk of page level
+   */
+  private MeasureColumnChunkReader measureColumnChunkPageLevelReader;
+
+  /**
    * number of pages in blocklet
    */
   private int numberOfPages;
@@ -76,12 +86,28 @@ public class BlockletBTreeLeafNode extends AbstractBTreeLeafNode {
         .getDimensionColumnChunkReader(builderInfos.getFooterList().get(0).getVersionId(),
             builderInfos.getFooterList().get(0).getBlockletList().get(leafIndex),
             builderInfos.getDimensionColumnValueSize(),
-            builderInfos.getFooterList().get(0).getBlockInfo().getTableBlockInfo().getFilePath());
+            builderInfos.getFooterList().get(0).getBlockInfo().getTableBlockInfo().getFilePath(),
+            false);
     // create a instance of measure column chunk reader
     measureColumnChunkReader = CarbonDataReaderFactory.getInstance()
         .getMeasureColumnChunkReader(builderInfos.getFooterList().get(0).getVersionId(),
             builderInfos.getFooterList().get(0).getBlockletList().get(leafIndex),
-            builderInfos.getFooterList().get(0).getBlockInfo().getTableBlockInfo().getFilePath());
+            builderInfos.getFooterList().get(0).getBlockInfo().getTableBlockInfo().getFilePath(),
+            false);
+    // create a instance of dimension chunk
+    dimensionChunksPageLevelReader = CarbonDataReaderFactory.getInstance()
+        .getDimensionColumnChunkReader(builderInfos.getFooterList().get(0).getVersionId(),
+            builderInfos.getFooterList().get(0).getBlockletList().get(leafIndex),
+            builderInfos.getDimensionColumnValueSize(),
+            builderInfos.getFooterList().get(0).getBlockInfo().getTableBlockInfo().getFilePath(),
+            true);
+    // create a instance of measure column chunk reader
+    measureColumnChunkPageLevelReader = CarbonDataReaderFactory.getInstance()
+        .getMeasureColumnChunkReader(builderInfos.getFooterList().get(0).getVersionId(),
+            builderInfos.getFooterList().get(0).getBlockletList().get(leafIndex),
+            builderInfos.getFooterList().get(0).getBlockInfo().getTableBlockInfo().getFilePath(),
+            true);
+
     this.nodeNumber = nodeNumber;
     this.numberOfPages =
         builderInfos.getFooterList().get(0).getBlockletList().get(leafIndex).getNumberOfPages();
@@ -111,7 +137,11 @@ public class BlockletBTreeLeafNode extends AbstractBTreeLeafNode {
    */
   @Override public DimensionRawColumnChunk[] getDimensionChunks(FileHolder fileReader,
       int[][] blockIndexes) throws IOException {
-    return dimensionChunksReader.readRawDimensionChunks(fileReader, blockIndexes);
+    if (fileReader.isReadPageByPage()) {
+      return dimensionChunksPageLevelReader.readRawDimensionChunks(fileReader, blockIndexes);
+    } else {
+      return dimensionChunksReader.readRawDimensionChunks(fileReader, blockIndexes);
+    }
   }
 
   /**
@@ -123,7 +153,11 @@ public class BlockletBTreeLeafNode extends AbstractBTreeLeafNode {
    */
   @Override public DimensionRawColumnChunk getDimensionChunk(FileHolder fileReader, int blockIndex)
       throws IOException {
-    return dimensionChunksReader.readRawDimensionChunk(fileReader, blockIndex);
+    if (fileReader.isReadPageByPage()) {
+      return dimensionChunksPageLevelReader.readRawDimensionChunk(fileReader, blockIndex);
+    } else {
+      return dimensionChunksReader.readRawDimensionChunk(fileReader, blockIndex);
+    }
   }
 
   /**
@@ -135,7 +169,11 @@ public class BlockletBTreeLeafNode extends AbstractBTreeLeafNode {
    */
   @Override public MeasureRawColumnChunk[] getMeasureChunks(FileHolder fileReader,
       int[][] blockIndexes) throws IOException {
-    return measureColumnChunkReader.readRawMeasureChunks(fileReader, blockIndexes);
+    if (fileReader.isReadPageByPage()) {
+      return measureColumnChunkPageLevelReader.readRawMeasureChunks(fileReader, blockIndexes);
+    } else {
+      return measureColumnChunkReader.readRawMeasureChunks(fileReader, blockIndexes);
+    }
   }
 
   /**
@@ -147,7 +185,11 @@ public class BlockletBTreeLeafNode extends AbstractBTreeLeafNode {
    */
   @Override public MeasureRawColumnChunk getMeasureChunk(FileHolder fileReader, int blockIndex)
       throws IOException {
-    return measureColumnChunkReader.readRawMeasureChunk(fileReader, blockIndex);
+    if (fileReader.isReadPageByPage()) {
+      return measureColumnChunkPageLevelReader.readRawMeasureChunk(fileReader, blockIndex);
+    } else {
+      return measureColumnChunkReader.readRawMeasureChunk(fileReader, blockIndex);
+    }
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataRefNodeWrapper.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataRefNodeWrapper.java b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataRefNodeWrapper.java
index 1585414..097dd8c 100644
--- a/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataRefNodeWrapper.java
+++ b/core/src/main/java/org/apache/carbondata/core/indexstore/blockletindex/BlockletDataRefNodeWrapper.java
@@ -117,46 +117,56 @@ public class BlockletDataRefNodeWrapper implements DataRefNode {
   @Override
   public DimensionRawColumnChunk[] getDimensionChunks(FileHolder fileReader, int[][] blockIndexes)
       throws IOException {
-    DimensionColumnChunkReader dimensionChunksReader = getDimensionColumnChunkReader();
+    DimensionColumnChunkReader dimensionChunksReader = getDimensionColumnChunkReader(fileReader);
     return dimensionChunksReader.readRawDimensionChunks(fileReader, blockIndexes);
   }
 
   @Override
   public DimensionRawColumnChunk getDimensionChunk(FileHolder fileReader, int blockIndexes)
       throws IOException {
-    DimensionColumnChunkReader dimensionChunksReader = getDimensionColumnChunkReader();
+    DimensionColumnChunkReader dimensionChunksReader = getDimensionColumnChunkReader(fileReader);
     return dimensionChunksReader.readRawDimensionChunk(fileReader, blockIndexes);
   }
 
   @Override
   public MeasureRawColumnChunk[] getMeasureChunks(FileHolder fileReader, int[][] blockIndexes)
       throws IOException {
-    MeasureColumnChunkReader measureColumnChunkReader = getMeasureColumnChunkReader();
+    MeasureColumnChunkReader measureColumnChunkReader = getMeasureColumnChunkReader(fileReader);
     return measureColumnChunkReader.readRawMeasureChunks(fileReader, blockIndexes);
   }
 
   @Override public MeasureRawColumnChunk getMeasureChunk(FileHolder fileReader, int blockIndex)
       throws IOException {
-    MeasureColumnChunkReader measureColumnChunkReader = getMeasureColumnChunkReader();
+    MeasureColumnChunkReader measureColumnChunkReader = getMeasureColumnChunkReader(fileReader);
     return measureColumnChunkReader.readRawMeasureChunk(fileReader, blockIndex);
   }
 
-  private DimensionColumnChunkReader getDimensionColumnChunkReader() throws IOException {
+  private DimensionColumnChunkReader getDimensionColumnChunkReader(FileHolder fileReader) {
     ColumnarFormatVersion version =
         ColumnarFormatVersion.valueOf(blockInfos.get(index).getDetailInfo().getVersionNumber());
-    return CarbonDataReaderFactory.getInstance().getDimensionColumnChunkReader(
-        version,
-        blockInfos.get(index).getDetailInfo().getBlockletInfo(),
-        dimensionLens,
-        blockInfos.get(index).getFilePath());
+    if (fileReader.isReadPageByPage()) {
+      return CarbonDataReaderFactory.getInstance().getDimensionColumnChunkReader(version,
+          blockInfos.get(index).getDetailInfo().getBlockletInfo(), dimensionLens,
+          blockInfos.get(index).getFilePath(), true);
+    } else {
+      return CarbonDataReaderFactory.getInstance().getDimensionColumnChunkReader(version,
+          blockInfos.get(index).getDetailInfo().getBlockletInfo(), dimensionLens,
+          blockInfos.get(index).getFilePath(), false);
+    }
   }
 
-  private MeasureColumnChunkReader getMeasureColumnChunkReader() throws IOException {
+  private MeasureColumnChunkReader getMeasureColumnChunkReader(FileHolder fileReader) {
     ColumnarFormatVersion version =
         ColumnarFormatVersion.valueOf(blockInfos.get(index).getDetailInfo().getVersionNumber());
-    return CarbonDataReaderFactory.getInstance().getMeasureColumnChunkReader(version,
-        blockInfos.get(index).getDetailInfo().getBlockletInfo(),
-        blockInfos.get(index).getFilePath());
+    if (fileReader.isReadPageByPage()) {
+      return CarbonDataReaderFactory.getInstance().getMeasureColumnChunkReader(version,
+          blockInfos.get(index).getDetailInfo().getBlockletInfo(),
+          blockInfos.get(index).getFilePath(), true);
+    } else {
+      return CarbonDataReaderFactory.getInstance().getMeasureColumnChunkReader(version,
+          blockInfos.get(index).getDetailInfo().getBlockletInfo(),
+          blockInfos.get(index).getFilePath(), false);
+    }
   }
 
   @Override

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
index 694f8ae..c33d5ac 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/impl/AbstractQueryExecutor.java
@@ -315,6 +315,7 @@ public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
     // total number dimension
     blockExecutionInfo
         .setTotalNumberDimensionBlock(segmentProperties.getDimensionOrdinalToBlockMapping().size());
+    blockExecutionInfo.setPrefetchBlocklet(!queryModel.isReadPageByPage());
     blockExecutionInfo
         .setTotalNumberOfMeasureBlock(segmentProperties.getMeasuresOrdinalToBlockMapping().size());
     blockExecutionInfo.setAbsoluteTableIdentifier(queryModel.getAbsoluteTableIdentifier());

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java b/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java
index adb6dc6..65f2838 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/executor/infos/BlockExecutionInfo.java
@@ -223,6 +223,11 @@ public class BlockExecutionInfo {
    */
   private String[] deleteDeltaFilePath;
 
+  /**
+   * whether to prefetch the blocklet data while scanning
+   */
+  private boolean prefetchBlocklet = true;
+
   private Map<String, DeleteDeltaVo> deletedRecordsMap;
   public AbsoluteTableIdentifier getAbsoluteTableIdentifier() {
     return absoluteTableIdentifier;
@@ -658,4 +663,12 @@ public class BlockExecutionInfo {
   public void setDeletedRecordsMap(Map<String, DeleteDeltaVo> deletedRecordsMap) {
     this.deletedRecordsMap = deletedRecordsMap;
   }
+
+  public boolean isPrefetchBlocklet() {
+    return prefetchBlocklet;
+  }
+
+  public void setPrefetchBlocklet(boolean prefetchBlocklet) {
+    this.prefetchBlocklet = prefetchBlocklet;
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModel.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModel.java b/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModel.java
index 5e4872b..655ed5e 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModel.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/model/QueryModel.java
@@ -112,6 +112,11 @@ public class QueryModel implements Serializable {
   private boolean[] isFilterDimensions;
   private boolean[] isFilterMeasures;
 
+  /**
+   * Read the data from carbondata file page by page instead of whole blocklet.
+   */
+  private boolean readPageByPage;
+
   public QueryModel() {
     tableBlockInfos = new ArrayList<TableBlockInfo>();
     queryDimension = new ArrayList<QueryDimension>();
@@ -417,4 +422,12 @@ public class QueryModel implements Serializable {
   public void setIsFilterMeasures(boolean[] isFilterMeasures) {
     this.isFilterMeasures = isFilterMeasures;
   }
+
+  public boolean isReadPageByPage() {
+    return readPageByPage;
+  }
+
+  public void setReadPageByPage(boolean readPageByPage) {
+    this.readPageByPage = readPageByPage;
+  }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/scan/processor/AbstractDataBlockIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/processor/AbstractDataBlockIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/processor/AbstractDataBlockIterator.java
index 798d331..eb5e3f9 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/processor/AbstractDataBlockIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/processor/AbstractDataBlockIterator.java
@@ -139,15 +139,24 @@ public abstract class AbstractDataBlockIterator extends CarbonIterator<List<Obje
 
   private AbstractScannedResult getNextScannedResult() throws Exception {
     AbstractScannedResult result = null;
-    if (dataBlockIterator.hasNext() || nextBlock.get() || nextRead.get()) {
-      if (future == null) {
-        future = execute();
+    if (blockExecutionInfo.isPrefetchBlocklet()) {
+      if (dataBlockIterator.hasNext() || nextBlock.get() || nextRead.get()) {
+        if (future == null) {
+          future = execute();
+        }
+        result = future.get();
+        nextBlock.set(false);
+        if (dataBlockIterator.hasNext() || nextRead.get()) {
+          nextBlock.set(true);
+          future = execute();
+        }
       }
-      result = future.get();
-      nextBlock.set(false);
-      if (dataBlockIterator.hasNext() || nextRead.get()) {
-        nextBlock.set(true);
-        future = execute();
+    } else {
+      if (dataBlockIterator.hasNext()) {
+        BlocksChunkHolder blocksChunkHolder = getBlocksChunkHolder();
+        if (blocksChunkHolder != null) {
+          result = blockletScanner.scanBlocklet(blocksChunkHolder);
+        }
       }
     }
     return result;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/scan/result/AbstractScannedResult.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/AbstractScannedResult.java b/core/src/main/java/org/apache/carbondata/core/scan/result/AbstractScannedResult.java
index 7cc02ad..b089fad 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/AbstractScannedResult.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/AbstractScannedResult.java
@@ -28,6 +28,7 @@ import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datastore.chunk.DimensionColumnDataChunk;
 import org.apache.carbondata.core.datastore.chunk.impl.DimensionRawColumnChunk;
+import org.apache.carbondata.core.datastore.chunk.impl.MeasureRawColumnChunk;
 import org.apache.carbondata.core.datastore.page.ColumnPage;
 import org.apache.carbondata.core.mutate.CarbonUpdateUtil;
 import org.apache.carbondata.core.mutate.DeleteDeltaVo;
@@ -83,7 +84,12 @@ public abstract class AbstractScannedResult {
   /**
    * Raw dimension chunks;
    */
-  protected DimensionRawColumnChunk[] rawColumnChunks;
+  protected DimensionRawColumnChunk[] dimRawColumnChunks;
+
+  /**
+   * Raw measure chunks;
+   */
+  protected MeasureRawColumnChunk[] msrRawColumnChunks;
   /**
    * measure column data chunk
    */
@@ -172,8 +178,12 @@ public abstract class AbstractScannedResult {
     this.measureDataChunks = measureDataChunks;
   }
 
-  public void setRawColumnChunks(DimensionRawColumnChunk[] rawColumnChunks) {
-    this.rawColumnChunks = rawColumnChunks;
+  public void setDimRawColumnChunks(DimensionRawColumnChunk[] dimRawColumnChunks) {
+    this.dimRawColumnChunks = dimRawColumnChunks;
+  }
+
+  public void setMsrRawColumnChunks(MeasureRawColumnChunk[] msrRawColumnChunks) {
+    this.msrRawColumnChunks = msrRawColumnChunks;
   }
 
   /**
@@ -269,7 +279,7 @@ public abstract class AbstractScannedResult {
         DataOutputStream dataOutput = new DataOutputStream(byteStream);
         try {
           vectorInfos[i].genericQueryType
-              .parseBlocksAndReturnComplexColumnByteArray(rawColumnChunks,
+              .parseBlocksAndReturnComplexColumnByteArray(dimRawColumnChunks,
                   rowMapping == null ? j : rowMapping[pageCounter][j], pageCounter, dataOutput);
           Object data = vectorInfos[i].genericQueryType
               .getDataBasedOnDataTypeFromSurrogates(ByteBuffer.wrap(byteStream.toByteArray()));
@@ -325,11 +335,51 @@ public abstract class AbstractScannedResult {
     rowCounter = 0;
     currentRow = -1;
     pageCounter++;
+    fillDataChunks();
     if (null != deletedRecordMap) {
       currentDeleteDeltaVo = deletedRecordMap.get(blockletNumber + "_" + pageCounter);
     }
   }
 
+  /**
+   * Fills the data chunks for the current page. This path is used only during compaction, which does not go through the filter flow.
+   */
+  public void fillDataChunks() {
+    freeDataChunkMemory();
+    if (pageCounter >= numberOfRows.length) {
+      return;
+    }
+    for (int i = 0; i < dimensionDataChunks.length; i++) {
+      if (dimensionDataChunks[i][pageCounter] == null && dimRawColumnChunks[i] != null) {
+        dimensionDataChunks[i][pageCounter] =
+            dimRawColumnChunks[i].convertToDimColDataChunkWithOutCache(pageCounter);
+      }
+    }
+
+    for (int i = 0; i < measureDataChunks.length; i++) {
+      if (measureDataChunks[i][pageCounter] == null && msrRawColumnChunks[i] != null) {
+        measureDataChunks[i][pageCounter] =
+            msrRawColumnChunks[i].convertToColumnPageWithOutCache(pageCounter);
+      }
+    }
+  }
+
+  // free the memory held by the previously processed page's chunks
+  private void freeDataChunkMemory() {
+    for (int i = 0; i < dimensionDataChunks.length; i++) {
+      if (pageCounter > 0 && dimensionDataChunks[i][pageCounter - 1] != null) {
+        dimensionDataChunks[i][pageCounter - 1].freeMemory();
+        dimensionDataChunks[i][pageCounter - 1] = null;
+      }
+    }
+    for (int i = 0; i < measureDataChunks.length; i++) {
+      if (pageCounter > 0 && measureDataChunks[i][pageCounter - 1] != null) {
+        measureDataChunks[i][pageCounter - 1].freeMemory();
+        measureDataChunks[i][pageCounter - 1] = null;
+      }
+    }
+  }
+
   public int numberOfpages() {
     return numberOfRows.length;
   }
@@ -451,7 +501,7 @@ public abstract class AbstractScannedResult {
       DataOutputStream dataOutput = new DataOutputStream(byteStream);
       try {
         genericQueryType
-            .parseBlocksAndReturnComplexColumnByteArray(rawColumnChunks, rowId, pageCounter,
+            .parseBlocksAndReturnComplexColumnByteArray(dimRawColumnChunks, rowId, pageCounter,
                 dataOutput);
         complexTypeData[i] = byteStream.toByteArray();
       } catch (IOException e) {
@@ -481,6 +531,7 @@ public abstract class AbstractScannedResult {
       return true;
     } else if (pageCounter < numberOfRows.length) {
       pageCounter++;
+      fillDataChunks();
       rowCounter = 0;
       currentRow = -1;
       if (null != deletedRecordMap) {
@@ -520,10 +571,10 @@ public abstract class AbstractScannedResult {
       }
     }
     // free the raw chunks
-    if (null != rawColumnChunks) {
-      for (int i = 0; i < rawColumnChunks.length; i++) {
-        if (null != rawColumnChunks[i]) {
-          rawColumnChunks[i].freeMemory();
+    if (null != dimRawColumnChunks) {
+      for (int i = 0; i < dimRawColumnChunks.length; i++) {
+        if (null != dimRawColumnChunks[i]) {
+          dimRawColumnChunks[i].freeMemory();
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
index 8dd61b1..6172b40 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/result/iterator/AbstractDetailQueryResultIterator.java
@@ -108,6 +108,7 @@ public abstract class AbstractDetailQueryResultIterator<E> extends CarbonIterato
     this.fileReader = FileFactory.getFileHolder(
         FileFactory.getFileType(queryModel.getAbsoluteTableIdentifier().getTablePath()));
     this.fileReader.setQueryId(queryModel.getQueryId());
+    this.fileReader.setReadPageByPage(queryModel.isReadPageByPage());
     this.execService = execService;
     intialiseInfos();
     initQueryStatiticsModel();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/scan/scanner/AbstractBlockletScanner.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/scanner/AbstractBlockletScanner.java b/core/src/main/java/org/apache/carbondata/core/scan/scanner/AbstractBlockletScanner.java
index 19d6f48..bf26ca3 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/scanner/AbstractBlockletScanner.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/scanner/AbstractBlockletScanner.java
@@ -77,25 +77,34 @@ public abstract class AbstractBlockletScanner implements BlockletScanner {
     scannedResult.setBlockletId(
         blockExecutionInfo.getBlockId() + CarbonCommonConstants.FILE_SEPARATOR + blocksChunkHolder
             .getDataBlock().blockletId());
+    if (!blockExecutionInfo.isPrefetchBlocklet()) {
+      readBlocklet(blocksChunkHolder);
+    }
     DimensionRawColumnChunk[] dimensionRawColumnChunks =
         blocksChunkHolder.getDimensionRawDataChunk();
     DimensionColumnDataChunk[][] dimensionColumnDataChunks =
-        new DimensionColumnDataChunk[dimensionRawColumnChunks.length][];
-    for (int i = 0; i < dimensionRawColumnChunks.length; i++) {
-      if (dimensionRawColumnChunks[i] != null) {
-        dimensionColumnDataChunks[i] = dimensionRawColumnChunks[i].convertToDimColDataChunks();
-      }
-    }
-    scannedResult.setDimensionChunks(dimensionColumnDataChunks);
+        new DimensionColumnDataChunk[dimensionRawColumnChunks.length][blocksChunkHolder
+            .getDataBlock().numberOfPages()];
     MeasureRawColumnChunk[] measureRawColumnChunks = blocksChunkHolder.getMeasureRawDataChunk();
     ColumnPage[][] columnPages =
-        new ColumnPage[measureRawColumnChunks.length][];
-    for (int i = 0; i < measureRawColumnChunks.length; i++) {
-      if (measureRawColumnChunks[i] != null) {
-        columnPages[i] = measureRawColumnChunks[i].convertToColumnPage();
+        new ColumnPage[measureRawColumnChunks.length][blocksChunkHolder.getDataBlock()
+                       .numberOfPages()];
+    scannedResult.setDimensionChunks(dimensionColumnDataChunks);
+    scannedResult.setMeasureChunks(columnPages);
+    scannedResult.setDimRawColumnChunks(dimensionRawColumnChunks);
+    scannedResult.setMsrRawColumnChunks(measureRawColumnChunks);
+    if (blockExecutionInfo.isPrefetchBlocklet()) {
+      for (int i = 0; i < dimensionRawColumnChunks.length; i++) {
+        if (dimensionRawColumnChunks[i] != null) {
+          dimensionColumnDataChunks[i] = dimensionRawColumnChunks[i].convertToDimColDataChunks();
+        }
+      }
+      for (int i = 0; i < measureRawColumnChunks.length; i++) {
+        if (measureRawColumnChunks[i] != null) {
+          columnPages[i] = measureRawColumnChunks[i].convertToColumnPage();
+        }
       }
     }
-    scannedResult.setMeasureChunks(columnPages);
     int[] numberOfRows = null;
     if (blockExecutionInfo.getAllSelectedDimensionBlocksIndexes().length > 0) {
       for (int i = 0; i < dimensionRawColumnChunks.length; i++) {
@@ -112,6 +121,7 @@ public abstract class AbstractBlockletScanner implements BlockletScanner {
         }
       }
     }
+
     // count(*)  case there would not be any dimensions are measures selected.
     if (numberOfRows == null) {
       numberOfRows = new int[blocksChunkHolder.getDataBlock().numberOfPages()];
@@ -127,7 +137,9 @@ public abstract class AbstractBlockletScanner implements BlockletScanner {
       }
     }
     scannedResult.setNumberOfRows(numberOfRows);
-    scannedResult.setRawColumnChunks(dimensionRawColumnChunks);
+    if (!blockExecutionInfo.isPrefetchBlocklet()) {
+      scannedResult.fillDataChunks();
+    }
     // adding statistics for carbon scan time
     QueryStatistic scanTime = queryStatisticsModel.getStatisticsTypeAndObjMap()
         .get(QueryStatisticsConstants.SCAN_BLOCKlET_TIME);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/FilterScanner.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/FilterScanner.java b/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/FilterScanner.java
index 79f9b49..e77093b 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/FilterScanner.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/scanner/impl/FilterScanner.java
@@ -309,7 +309,8 @@ public class FilterScanner extends AbstractBlockletScanner {
     scannedResult.setDimensionChunks(dimensionColumnDataChunks);
     scannedResult.setIndexes(indexesGroup);
     scannedResult.setMeasureChunks(columnPages);
-    scannedResult.setRawColumnChunks(dimensionRawColumnChunks);
+    scannedResult.setDimRawColumnChunks(dimensionRawColumnChunks);
+    scannedResult.setMsrRawColumnChunks(measureRawColumnChunks);
     scannedResult.setNumberOfRows(rowCount);
     // adding statistics for carbon scan time
     QueryStatistic scanTime = queryStatisticsModel.getStatisticsTypeAndObjMap()

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
index b1c0c30..5d7a09f 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonUtil.java
@@ -24,6 +24,7 @@ import java.io.Closeable;
 import java.io.DataInputStream;
 import java.io.File;
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.io.ObjectInputStream;
 import java.io.UnsupportedEncodingException;
@@ -1467,6 +1468,22 @@ public final class CarbonUtil {
     }, offset, length);
   }
 
+  public static DataChunk3 readDataChunk3(InputStream stream) throws IOException {
+    TBaseCreator creator = new ThriftReader.TBaseCreator() {
+      @Override public TBase create() {
+        return new DataChunk3();
+      }
+    };
+    TProtocol binaryIn = new TCompactProtocol(new TIOStreamTransport(stream));
+    TBase t = creator.create();
+    try {
+      t.read(binaryIn);
+    } catch (TException e) {
+      throw new IOException(e);
+    }
+    return (DataChunk3) t;
+  }
+
   public static DataChunk2 readDataChunk(ByteBuffer dataChunkBuffer, int offset, int length)
       throws IOException {
     byte[] data = dataChunkBuffer.array();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/d509f17f/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionExecutor.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionExecutor.java b/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionExecutor.java
index 65f70a0..f51ced3 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionExecutor.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/merger/CarbonCompactionExecutor.java
@@ -45,6 +45,7 @@ import org.apache.carbondata.core.scan.model.QueryMeasure;
 import org.apache.carbondata.core.scan.model.QueryModel;
 import org.apache.carbondata.core.scan.result.BatchResult;
 import org.apache.carbondata.core.scan.result.iterator.RawResultIterator;
+import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.core.util.DataTypeUtil;
 
@@ -205,6 +206,7 @@ public class CarbonCompactionExecutor {
     model.setForcedDetailRawQuery(true);
     model.setFilterExpressionResolverTree(null);
     model.setConverter(DataTypeUtil.getDataTypeConverter());
+    model.setReadPageByPage(enablePageLevelReaderForCompaction());
 
     List<QueryDimension> dims = new ArrayList<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
 
@@ -234,4 +236,21 @@ public class CarbonCompactionExecutor {
     return model;
   }
 
+  /**
+   * Whether to enable page level reader for compaction or not.
+   */
+  private boolean enablePageLevelReaderForCompaction() {
+    String enablePageReaderProperty = CarbonProperties.getInstance()
+        .getProperty(CarbonCommonConstants.CARBON_ENABLE_PAGE_LEVEL_READER_IN_COMPACTION,
+            CarbonCommonConstants.CARBON_ENABLE_PAGE_LEVEL_READER_IN_COMPACTION_DEFAULT);
+    boolean enablePageReader;
+    try {
+      enablePageReader = Boolean.parseBoolean(enablePageReaderProperty);
+    } catch (Exception e) {
+      enablePageReader = Boolean.parseBoolean(
+          CarbonCommonConstants.CARBON_ENABLE_PAGE_LEVEL_READER_IN_COMPACTION_DEFAULT);
+    }
+    return enablePageReader;
+  }
+
 }


Mime
View raw message