carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ravipes...@apache.org
Subject [8/9] incubator-carbondata git commit: clean up core
Date Wed, 04 Jan 2017 13:35:02 GMT
clean up core


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/eaadc88a
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/eaadc88a
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/eaadc88a

Branch: refs/heads/master
Commit: eaadc88a5525ae283dfc328026c01f09e7aba20f
Parents: 82072ee
Author: jackylk <jacky.likun@huawei.com>
Authored: Wed Jan 4 20:56:40 2017 +0800
Committer: jackylk <jacky.likun@huawei.com>
Committed: Wed Jan 4 20:56:40 2017 +0800

----------------------------------------------------------------------
 .../org/apache/carbondata/core/cache/Cache.java |  11 +-
 .../AbstractColumnDictionaryInfo.java           |   3 +-
 .../dictionary/AbstractDictionaryCache.java     | 105 +++--
 .../dictionary/DictionaryBuilderException.java  |  63 +++
 .../dictionary/DoubleArrayTrieDictionary.java   | 384 -------------------
 .../dictionary/ForwardDictionaryCache.java      |  18 +-
 .../dictionary/ReverseDictionaryCache.java      |  18 +-
 .../core/carbon/CarbonDataLoadSchema.java       |  25 +-
 .../datastore/AbstractBlockIndexStoreCache.java |  14 +-
 .../core/carbon/datastore/BlockIndexStore.java  |  20 +-
 .../core/carbon/datastore/DataRefNode.java      |  16 +-
 .../carbon/datastore/DataRefNodeFinder.java     |   2 -
 .../carbon/datastore/SegmentTaskIndexStore.java |  36 +-
 .../carbon/datastore/block/AbstractIndex.java   |   7 -
 .../core/carbon/datastore/block/BlockIndex.java |   1 -
 .../datastore/block/SegmentProperties.java      |   5 +-
 .../datastore/block/SegmentTaskIndex.java       |   1 -
 .../carbon/datastore/block/TableTaskInfo.java   |   3 +-
 .../chunk/DimensionColumnDataChunk.java         |   3 -
 .../reader/DimensionColumnChunkReader.java      |   8 +-
 .../chunk/reader/MeasureColumnChunkReader.java  |   7 +-
 .../reader/dimension/AbstractChunkReader.java   |   1 -
 ...mpressedDimensionChunkFileBasedReaderV1.java |   5 +-
 ...mpressedDimensionChunkFileBasedReaderV2.java |   7 +-
 .../measure/AbstractMeasureChunkReader.java     |   3 -
 ...CompressedMeasureChunkFileBasedReaderV1.java |   7 +-
 ...CompressedMeasureChunkFileBasedReaderV2.java |  14 +-
 .../exception/IndexBuilderException.java        |   4 +-
 .../impl/btree/AbstractBTreeBuilder.java        |   3 +-
 .../impl/btree/AbstractBTreeLeafNode.java       |  13 +-
 .../impl/btree/BTreeDataRefNodeFinder.java      |   4 -
 .../datastore/impl/btree/BTreeNonLeafNode.java  |   5 -
 .../impl/btree/BlockBTreeLeafNode.java          |   1 -
 .../impl/btree/BlockletBTreeLeafNode.java       |  11 +-
 .../compressor/ChunkCompressorMeta.java         |  56 ---
 .../blocklet/compressor/CompressionCodec.java   |  31 --
 .../metadata/blocklet/datachunk/DataChunk.java  | 105 +----
 .../metadata/blocklet/sort/SortState.java       |  38 --
 .../ThriftWrapperSchemaConverterImpl.java       |   2 -
 .../core/carbon/metadata/encoder/Encoding.java  |   2 +-
 .../metadata/schema/table/CarbonTable.java      |   4 -
 .../carbon/metadata/schema/table/TableInfo.java |  28 --
 .../metadata/schema/table/TableSchema.java      |  30 --
 .../schema/table/column/CarbonMeasure.java      |  19 -
 .../schema/table/column/ColumnSchema.java       |  21 -
 .../core/carbon/path/CarbonTablePath.java       |   1 -
 .../QueryStatisticsRecorderDummy.java           |   2 +-
 .../core/compression/DoubleCompressor.java      |   6 +-
 .../core/constants/CarbonCommonConstants.java   |  26 --
 .../core/datastorage/store/FileHolder.java      |  15 +-
 .../core/datastorage/store/NodeKeyStore.java    |  64 ----
 .../datastorage/store/NodeMeasureDataStore.java |   6 -
 .../store/columnar/ColumnarKeyStore.java        |  47 ---
 .../columnar/ColumnarKeyStoreDataHolder.java    |  45 +--
 .../store/columnar/ColumnarKeyStoreInfo.java    | 262 -------------
 .../columnar/ColumnarKeyStoreMetadata.java      | 107 +-----
 .../store/compression/WriterCompressModel.java  |   3 +-
 .../decimal/UnCompressByteArray.java            |   2 +-
 .../decimal/UnCompressMaxMinFloat.java          |   2 +-
 .../compression/none/UnCompressNoneDefault.java |   3 -
 .../compression/none/UnCompressNoneLong.java    |   6 +-
 .../store/impl/DFSFileHolderImpl.java           | 111 ++----
 .../datastorage/store/impl/FileFactory.java     |  68 ----
 .../datastorage/store/impl/FileHolderImpl.java  |  67 ++--
 .../store/impl/MemoryMappedFileHolderImpl.java  | 118 ------
 ...ractHeavyCompressedDoubleArrayDataStore.java |   8 +-
 ...yCompressedDoubleArrayDataInMemoryStore.java |  34 --
 .../dictionary/generator/DictionaryWriter.java  |   3 +-
 .../IncrementalColumnDictionaryGenerator.java   |   6 +-
 .../generator/ServerDictionaryGenerator.java    |   2 +-
 .../generator/TableDictionaryGenerator.java     |   9 +-
 .../dictionary/server/DictionaryServer.java     |   2 +-
 .../core/keygenerator/KeyGenerator.java         |   3 -
 .../keygenerator/columnar/ColumnarSplitter.java |  18 -
 .../MultiDimKeyVarLengthEquiSplitGenerator.java |  26 +-
 ...tiDimKeyVarLengthVariableSplitGenerator.java |  14 +-
 .../TimeStampGranularityConstants.java          |   4 +-
 .../TimeStampGranularityTypeValue.java          |   2 +-
 .../mdkey/AbstractKeyGenerator.java             |   2 +-
 .../mdkey/MultiDimKeyVarLengthGenerator.java    |  10 -
 .../core/memory/HeapMemoryAllocator.java        |   2 -
 .../carbondata/core/metadata/BlockletInfo.java  | 202 ----------
 .../core/metadata/BlockletInfoColumnar.java     |  49 ---
 .../core/reader/CarbonDictionaryReaderImpl.java |   8 +-
 .../core/reader/CarbonFooterReader.java         |   5 +-
 .../carbondata/core/reader/ThriftReader.java    |   2 +-
 .../CarbonDictionarySortIndexReader.java        |   4 +-
 .../core/service/ColumnUniqueIdService.java     |   2 +-
 .../core/service/DictionaryService.java         |  10 +-
 .../carbondata/core/unsafe/CarbonUnsafe.java    |   2 +-
 .../util/AbstractDataFileFooterConverter.java   |  61 +--
 .../carbondata/core/util/CarbonMergerUtil.java  |   4 +-
 .../core/util/CarbonMetadataUtil.java           |   2 +-
 .../carbondata/core/util/CarbonProperties.java  |  16 -
 .../core/util/CarbonTimeStatisticsFactory.java  |   8 +-
 .../apache/carbondata/core/util/CarbonUtil.java | 174 +++------
 .../core/util/CarbonUtilException.java          |  80 ----
 .../carbondata/core/util/CompressionFinder.java |   2 +-
 .../carbondata/core/util/DataTypeUtil.java      |   2 +-
 .../core/util/ValueCompressionUtil.java         |  10 +-
 .../core/writer/CarbonFooterWriter.java         |   2 -
 .../core/writer/CarbonIndexFileWriter.java      |   1 -
 .../core/writer/HierarchyValueWriterForCSV.java |   4 +-
 .../CarbonDictionarySortIndexWriter.java        |   7 +-
 .../CarbonDictionarySortIndexWriterImpl.java    |   5 +-
 .../CarbonDictionarySortInfoPreparator.java     |   4 +-
 .../sortindex/CarbonDictionarySortModel.java    |  12 +-
 .../impl/DictionaryBasedResultCollector.java    |   5 -
 .../DictionaryBasedVectorResultCollector.java   |   5 -
 .../collector/impl/RawBasedResultCollector.java |   5 -
 .../scan/complextypes/ArrayQueryType.java       |  54 +--
 .../scan/complextypes/ComplexQueryType.java     |  12 +-
 .../scan/complextypes/PrimitiveQueryType.java   |  42 +-
 .../scan/complextypes/StructQueryType.java      |  60 +--
 .../carbondata/scan/executor/QueryExecutor.java |   6 +-
 .../exception/QueryExecutionException.java      |   2 -
 .../executor/impl/AbstractQueryExecutor.java    |  47 +--
 .../scan/executor/impl/DetailQueryExecutor.java |   3 +-
 .../executor/impl/QueryExecutorProperties.java  |  24 --
 .../impl/VectorDetailQueryExecutor.java         |   6 +-
 .../scan/executor/infos/AggregatorInfo.java     |  44 ---
 .../scan/executor/infos/BlockExecutionInfo.java | 160 --------
 .../scan/executor/infos/KeyStructureInfo.java   |  19 -
 .../scan/executor/infos/SortInfo.java           | 125 ------
 .../scan/executor/util/QueryUtil.java           | 170 +-------
 .../scan/expression/BinaryExpression.java       |  16 -
 .../carbondata/scan/expression/Expression.java  |   1 -
 .../conditional/ConditionalExpression.java      |   6 -
 .../exception/FilterIllegalMemberException.java |   4 -
 .../exception/FilterUnsupportedException.java   |   4 -
 .../logical/BinaryLogicalExpression.java        |  18 -
 .../scan/filter/FilterExpressionProcessor.java  |  22 +-
 .../carbondata/scan/filter/FilterProcessor.java |   9 +-
 .../carbondata/scan/filter/FilterUtil.java      | 174 +++------
 .../scan/filter/GenericQueryType.java           |  22 +-
 .../filter/executer/AndFilterExecuterImpl.java  |   3 +-
 .../ExcludeColGroupFilterExecuterImpl.java      |   2 -
 .../executer/ExcludeFilterExecuterImpl.java     |   3 +-
 .../scan/filter/executer/FilterExecuter.java    |   4 +-
 .../IncludeColGroupFilterExecuterImpl.java      |   2 -
 .../executer/IncludeFilterExecuterImpl.java     |   8 +-
 .../filter/executer/OrFilterExecuterImpl.java   |   3 +-
 .../executer/RowLevelFilterExecuterImpl.java    |  86 ++---
 .../RowLevelRangeGrtThanFiterExecuterImpl.java  |   7 +-
 ...elRangeGrtrThanEquaToFilterExecuterImpl.java |   3 +-
 ...velRangeLessThanEqualFilterExecuterImpl.java |   3 +-
 .../RowLevelRangeLessThanFiterExecuterImpl.java |   3 +-
 .../RowLevelRangeTypeExecuterFacory.java        |   9 +-
 .../filter/resolver/AndFilterResolverImpl.java  |  56 ---
 .../resolver/ConditionalFilterResolverImpl.java |  39 +-
 .../filter/resolver/FilterResolverIntf.java     |  18 +-
 .../resolver/LogicalFilterResolverImpl.java     |   9 +-
 .../resolver/RestructureFilterResolverImpl.java | 208 ----------
 .../resolver/RowLevelFilterResolverImpl.java    |   7 +-
 .../RowLevelRangeFilterResolverImpl.java        |  51 +--
 .../DimColumnResolvedFilterInfo.java            |  59 +--
 .../MeasureColumnResolvedFilterInfo.java        |  37 +-
 .../visitable/ResolvedFilterInfoVisitable.java  |  38 --
 .../visitor/CustomTypeDictionaryVisitor.java    |  13 +-
 .../visitor/DictionaryColumnVisitor.java        |  33 +-
 .../visitor/NoDictionaryTypeVisitor.java        |   7 +-
 .../visitor/ResolvedFilterInfoVisitorIntf.java  |   4 +-
 .../carbondata/scan/model/CarbonQueryPlan.java  |  83 ----
 .../carbondata/scan/model/QueryColumn.java      |  41 --
 .../carbondata/scan/model/QueryModel.java       | 184 +--------
 .../carbondata/scan/model/SortOrderType.java    |  57 ---
 .../processor/AbstractDataBlockIterator.java    |  21 +-
 .../apache/carbondata/scan/result/Result.java   |  70 ----
 .../AbstractDetailQueryResultIterator.java      |   1 -
 .../iterator/DetailQueryResultIterator.java     |  10 +-
 .../scan/result/iterator/RawResultIterator.java |   2 +-
 .../scan/result/vector/CarbonColumnVector.java  |  24 +-
 .../vector/MeasureDataVectorProcessor.java      |   2 +-
 .../scan/scanner/AbstractBlockletScanner.java   |   8 +-
 .../scan/scanner/BlockletScanner.java           |   7 +-
 .../scan/scanner/impl/FilterScanner.java        |  29 +-
 .../dictionary/ReverseDictionaryCacheTest.java  |   5 +-
 .../store/impl/DFSFileHolderImplUnitTest.java   |  40 +-
 .../store/impl/FileHolderImplUnitTest.java      |  17 +-
 .../MemoryMappedFileHolderImplUnitTest.java     | 137 -------
 .../datastore/SegmentTaskIndexStoreTest.java    |   4 +-
 .../datastore/block/SegmentPropertiesTest.java  |   2 -
 .../block/SegmentPropertiesTestUtil.java        |   2 -
 ...ressedDimensionChunkFileBasedReaderTest.java |   5 +-
 ...mpressedMeasureChunkFileBasedReaderTest.java |   5 +-
 .../carbon/metadata/CarbonMetadataTest.java     |   1 -
 .../ThriftWrapperSchemaConverterImplTest.java   |   1 -
 .../metadata/schema/table/CarbonTableTest.java  |   1 -
 .../table/CarbonTableWithComplexTypesTest.java  |   1 -
 .../ColumnarKeyStoreDataHolderUnitTest.java     |   2 +-
 .../carbondata/core/util/CarbonUtilTest.java    |  56 +--
 .../core/writer/CarbonFooterWriterTest.java     |   4 +-
 .../CarbonDictionarySortInfoPreparatorTest.java |   9 +-
 .../impl/RawBasedResultCollectorTest.java       |   1 -
 .../carbondata/scan/filter/FilterUtilTest.java  |  65 +---
 .../scanner/impl/FilterScannerTest.java         |  12 +-
 .../carbondata/hadoop/CacheAccessClient.java    |  12 +-
 .../carbondata/hadoop/CarbonInputFormat.java    |  41 +-
 .../internal/index/impl/InMemoryBTreeIndex.java |  35 +-
 .../hadoop/readsupport/CarbonReadSupport.java   |   8 +-
 .../AbstractDictionaryDecodedReadSupport.java   |  17 +-
 .../hadoop/util/CarbonInputFormatUtil.java      |   1 -
 .../hadoop/test/util/StoreCreator.java          |   4 +-
 .../carbondata/spark/load/CarbonLoaderUtil.java |  14 +-
 .../spark/merger/CarbonCompactionExecutor.java  |  18 +-
 .../spark/merger/CarbonCompactionUtil.java      |  12 +-
 .../carbondata/spark/rdd/CarbonMergerRDD.scala  |   5 +-
 .../execution/command/carbonTableSchema.scala   |   1 -
 .../readsupport/SparkRowReadSupportImpl.java    |   3 +-
 .../scala/org/apache/spark/sql/CarbonScan.scala |  10 -
 .../spark/sql/SparkUnknownExpression.scala      |  17 +-
 .../readsupport/SparkRowReadSupportImpl.java    |   4 +-
 .../scala/org/apache/spark/sql/CarbonScan.scala |   3 -
 .../processing/csvload/DataGraphExecuter.java   | 128 ++-----
 .../processing/csvload/GraphExecutionUtil.java  |   2 +-
 .../processing/datatypes/PrimitiveDataType.java |   9 +-
 .../processing/mdkeygen/MDKeyGenStep.java       |   5 +-
 .../newflow/AbstractDataLoadProcessorStep.java  |   5 +-
 .../newflow/converter/RowConverter.java         |   4 +-
 .../impl/DictionaryFieldConverterImpl.java      |  29 +-
 .../converter/impl/FieldEncoderFactory.java     |   4 +-
 .../converter/impl/RowConverterImpl.java        |  15 +-
 .../impl/UnsafeParallelReadMergeSorterImpl.java |   2 +-
 .../newflow/sort/unsafe/UnsafeSortDataRows.java |  20 +-
 .../merger/UnsafeIntermediateFileMerger.java    |   3 +-
 .../steps/DataConverterProcessorStepImpl.java   |   4 +-
 ...ConverterProcessorWithBucketingStepImpl.java |   4 +-
 .../steps/DataWriterProcessorStepImpl.java      |   3 +-
 .../newflow/steps/SortProcessorStepImpl.java    |   3 +-
 .../sortdata/IntermediateFileMerger.java        |   3 +-
 .../FileStoreSurrogateKeyGenForCSV.java         |  55 +--
 .../util/CarbonDataProcessorUtil.java           |  40 +-
 .../carbon/datastore/BlockIndexStoreTest.java   |  12 +-
 .../carbondata/test/util/StoreCreator.java      |   4 +-
 234 files changed, 946 insertions(+), 5250 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/cache/Cache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/Cache.java b/core/src/main/java/org/apache/carbondata/core/cache/Cache.java
index c87e7d9..1734385 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/Cache.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/Cache.java
@@ -19,10 +19,9 @@
 
 package org.apache.carbondata.core.cache;
 
+import java.io.IOException;
 import java.util.List;
 
-import org.apache.carbondata.core.util.CarbonUtilException;
-
 /**
  * A semi-persistent mapping from keys to values. Cache entries are manually added using
  * #get(Key), #getAll(List<Keys>) , and are stored in the cache until
@@ -41,9 +40,9 @@ public interface Cache<K, V> {
    *
    * @param key
    * @return
-   * @throws CarbonUtilException in case memory is not sufficient to load data into memory
+   * @throws IOException in case memory is not sufficient to load data into memory
    */
-  V get(K key) throws CarbonUtilException;
+  V get(K key) throws IOException;
 
   /**
    * This method will return a list of values for the given list of keys.
@@ -51,9 +50,9 @@ public interface Cache<K, V> {
    * Access count of Cacheable entry will be incremented
    * @param keys
    * @return
-   * @throws CarbonUtilException in case memory is not sufficient to load data into memory
+   * @throws IOException in case memory is not sufficient to load data into memory
    */
-  List<V> getAll(List<K> keys) throws CarbonUtilException;
+  List<V> getAll(List<K> keys) throws IOException;
 
   /**
    * This method will return the value for the given key. It will not check and load

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
index ab400f0..640a6b2 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractColumnDictionaryInfo.java
@@ -151,8 +151,7 @@ public abstract class AbstractColumnDictionaryInfo implements DictionaryInfo {
    * @return
    */
   @Override public DictionaryChunksWrapper getDictionaryChunks() {
-    DictionaryChunksWrapper chunksWrapper = new DictionaryChunksWrapper(dictionaryChunks);
-    return chunksWrapper;
+    return new DictionaryChunksWrapper(dictionaryChunks);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractDictionaryCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractDictionaryCache.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractDictionaryCache.java
index 5fbef5d..b554d34 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractDictionaryCache.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/AbstractDictionaryCache.java
@@ -35,7 +35,6 @@ import org.apache.carbondata.core.reader.CarbonDictionaryMetadataReader;
 import org.apache.carbondata.core.service.DictionaryService;
 import org.apache.carbondata.core.service.PathService;
 import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.core.util.CarbonUtilException;
 
 /**
  * Abstract class which implements methods common to reverse and forward dictionary cache
@@ -137,8 +136,7 @@ public abstract class AbstractDictionaryCache<K extends DictionaryColumnUniqueId
         carbonTablePath.getDictionaryMetaFilePath(dictionaryColumnUniqueIdentifier
             .getColumnIdentifier().getColumnId());
     FileFactory.FileType fileType = FileFactory.getFileType(dictionaryFilePath);
-    CarbonFile carbonFile = FileFactory.getCarbonFile(dictionaryFilePath, fileType);
-    return carbonFile;
+    return FileFactory.getCarbonFile(dictionaryFilePath, fileType);
   }
 
   /**
@@ -149,65 +147,62 @@ public abstract class AbstractDictionaryCache<K extends DictionaryColumnUniqueId
    *                                         tableName and columnIdentifier
    * @param dictionaryInfo
    * @param lruCacheKey
-   * @param loadSortIndex                    read and load sort index file in memory
-   * @throws CarbonUtilException in case memory is not sufficient to load dictionary into memory
+   * @throws IOException                    in case memory is not sufficient to load dictionary
+   *                                        into memory
    */
   protected void checkAndLoadDictionaryData(
       DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier,
       DictionaryInfo dictionaryInfo, String lruCacheKey, boolean loadSortIndex)
-      throws CarbonUtilException {
-    try {
-      // read last segment dictionary meta chunk entry to get the end offset of file
-      CarbonFile carbonFile = getDictionaryMetaCarbonFile(dictionaryColumnUniqueIdentifier);
-      boolean dictionaryMetaFileModified =
-          isDictionaryMetaFileModified(carbonFile, dictionaryInfo.getFileTimeStamp(),
-              dictionaryInfo.getDictionaryMetaFileLength());
-      // if dictionary metadata file is modified then only read the last entry from dictionary
-      // meta file
-      if (dictionaryMetaFileModified) {
-        synchronized (dictionaryInfo) {
-          carbonFile = getDictionaryMetaCarbonFile(dictionaryColumnUniqueIdentifier);
-          dictionaryMetaFileModified =
-              isDictionaryMetaFileModified(carbonFile, dictionaryInfo.getFileTimeStamp(),
-                  dictionaryInfo.getDictionaryMetaFileLength());
-          // Double Check :
-          // if dictionary metadata file is modified then only read the last entry from dictionary
-          // meta file
-          if (dictionaryMetaFileModified) {
-            CarbonDictionaryColumnMetaChunk carbonDictionaryColumnMetaChunk =
-                readLastChunkFromDictionaryMetadataFile(dictionaryColumnUniqueIdentifier);
-            // required size will be size total size of file - offset till file is
-            // already read
-            long requiredSize =
-                carbonDictionaryColumnMetaChunk.getEnd_offset() - dictionaryInfo.getMemorySize();
-            if (requiredSize > 0) {
-              boolean columnAddedToLRUCache =
-                  carbonLRUCache.put(lruCacheKey, dictionaryInfo, requiredSize);
-              // if column is successfully added to lru cache then only load the
-              // dictionary data
-              if (columnAddedToLRUCache) {
-                // load dictionary data
-                loadDictionaryData(dictionaryInfo, dictionaryColumnUniqueIdentifier,
-                    dictionaryInfo.getMemorySize(), carbonDictionaryColumnMetaChunk.getEnd_offset(),
-                    loadSortIndex);
-                // set the end offset till where file is read
-                dictionaryInfo
-                    .setOffsetTillFileIsRead(carbonDictionaryColumnMetaChunk.getEnd_offset());
-                dictionaryInfo.setFileTimeStamp(carbonFile.getLastModifiedTime());
-                dictionaryInfo.setDictionaryMetaFileLength(carbonFile.getSize());
-              } else {
-                throw new CarbonUtilException(
-                    "Cannot load dictionary into memory. Not enough memory available");
-              }
+      throws IOException {
+    // read last segment dictionary meta chunk entry
+    // to get the end offset of file
+    CarbonFile carbonFile = getDictionaryMetaCarbonFile(dictionaryColumnUniqueIdentifier);
+    boolean dictionaryMetaFileModified =
+        isDictionaryMetaFileModified(carbonFile, dictionaryInfo.getFileTimeStamp(),
+            dictionaryInfo.getDictionaryMetaFileLength());
+    // if dictionary metadata file is modified then only read the last entry from dictionary
+    // meta file
+    if (dictionaryMetaFileModified) {
+      synchronized (dictionaryInfo) {
+        carbonFile = getDictionaryMetaCarbonFile(dictionaryColumnUniqueIdentifier);
+        dictionaryMetaFileModified =
+            isDictionaryMetaFileModified(carbonFile, dictionaryInfo.getFileTimeStamp(),
+                dictionaryInfo.getDictionaryMetaFileLength());
+        // Double Check :
+        // if dictionary metadata file is modified then only read the last entry from dictionary
+        // meta file
+        if (dictionaryMetaFileModified) {
+          CarbonDictionaryColumnMetaChunk carbonDictionaryColumnMetaChunk =
+              readLastChunkFromDictionaryMetadataFile(dictionaryColumnUniqueIdentifier);
+          // required size will be size total size of file - offset till file is
+          // already read
+          long requiredSize =
+              carbonDictionaryColumnMetaChunk.getEnd_offset() - dictionaryInfo.getMemorySize();
+          if (requiredSize > 0) {
+            boolean columnAddedToLRUCache =
+                carbonLRUCache.put(lruCacheKey, dictionaryInfo, requiredSize);
+            // if column is successfully added to lru cache then only load the
+            // dictionary data
+            if (columnAddedToLRUCache) {
+              // load dictionary data
+              loadDictionaryData(dictionaryInfo, dictionaryColumnUniqueIdentifier,
+                  dictionaryInfo.getMemorySize(), carbonDictionaryColumnMetaChunk.getEnd_offset(),
+                  loadSortIndex);
+              // set the end offset till where file is read
+              dictionaryInfo
+                  .setOffsetTillFileIsRead(carbonDictionaryColumnMetaChunk.getEnd_offset());
+              dictionaryInfo.setFileTimeStamp(carbonFile.getLastModifiedTime());
+              dictionaryInfo.setDictionaryMetaFileLength(carbonFile.getSize());
+            } else {
+              throw new DictionaryBuilderException(
+                  "Cannot load dictionary into memory. Not enough memory available");
             }
           }
         }
       }
-      // increment the column access count
-      incrementDictionaryAccessCount(dictionaryInfo);
-    } catch (IOException e) {
-      throw new CarbonUtilException(e.getMessage());
     }
+    // increment the column access count
+    incrementDictionaryAccessCount(dictionaryInfo);
   }
 
   /**
@@ -217,9 +212,7 @@ public abstract class AbstractDictionaryCache<K extends DictionaryColumnUniqueId
    * @return
    */
   protected String getLruCacheKey(String columnIdentifier, CacheType cacheType) {
-    String lruCacheKey =
-        columnIdentifier + CarbonCommonConstants.UNDERSCORE + cacheType.getCacheName();
-    return lruCacheKey;
+    return columnIdentifier + CarbonCommonConstants.UNDERSCORE + cacheType.getCacheName();
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryBuilderException.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryBuilderException.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryBuilderException.java
new file mode 100644
index 0000000..39635a3
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryBuilderException.java
@@ -0,0 +1,63 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.carbondata.core.cache.dictionary;
+
+import java.io.IOException;
+
+public class DictionaryBuilderException extends IOException {
+  /**
+   * default serial version ID.
+   */
+  private static final long serialVersionUID = 21312121L;
+
+  /**
+   * The Error message.
+   */
+  private String msg = "";
+
+  /**
+   * Constructor
+   *
+   * @param msg       The error message for this exception.
+   */
+  public DictionaryBuilderException(String msg) {
+    super(msg);
+    this.msg = msg;
+  }
+
+  /**
+   * getLocalizedMessage
+   */
+  @Override public String getLocalizedMessage() {
+    return super.getLocalizedMessage();
+  }
+
+  /**
+   * getMessage
+   */
+  public String getMessage() {
+    return this.msg;
+  }
+
+  @Override public String toString() {
+    return "DictionaryBuilderException: " + msg;
+  }
+}
+

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
deleted file mode 100644
index 13d9233..0000000
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
+++ /dev/null
@@ -1,384 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.cache.dictionary;
-
-import java.io.*;
-import java.util.SortedSet;
-import java.util.TreeSet;
-
-/**
- * A dictionary based on DoubleArrayTrie data structure that maps enumerations
- * of byte[] to int IDs. With DoubleArrayTrie the memory footprint of the mapping
- * is minimized, compared to HashMap.
- * This DAT implementation is inspired by https://linux.thai.net/~thep/datrie/datrie.html
- */
-
-public class DoubleArrayTrieDictionary {
-  public static final byte[] HEAD_MAGIC = new byte[]{
-      0x44, 0x41, 0x54, 0x54, 0x72, 0x69, 0x65, 0x44, 0x69, 0x63, 0x74
-  }; // "DATTrieDict"
-  public static final int HEAD_LEN = HEAD_MAGIC.length;
-
-  private static final int INIT_CAPA_VALUE = 256;  // init len of double array
-  private static final int BASE_ROOT_VALUE = 1;    // root base value of trie root
-  private static final int CHCK_ROOT_VALUE = -1;   // root check value of trie root
-  private static final int UUSD_ROOM_VALUE = -2;   // unused position, only for zero
-  private static final int EPTY_BACK_VALUE = 0;    // value of empty position
-
-  private static final int ENCODE_BASE_VALUE = 10; // encode start number
-
-  private int[] base;
-  private int[] check;
-  private int size;
-  private int capacity;
-
-  private int id = ENCODE_BASE_VALUE;
-
-  public DoubleArrayTrieDictionary() {
-    base = new int[INIT_CAPA_VALUE];
-    check = new int[INIT_CAPA_VALUE];
-    capacity = INIT_CAPA_VALUE;
-    base[0] = UUSD_ROOM_VALUE;
-    check[0] = UUSD_ROOM_VALUE;
-    base[1] = BASE_ROOT_VALUE;
-    check[1] = CHCK_ROOT_VALUE;
-    size = 2;
-  }
-
-  private void init(int capacity, int size, int[] base, int[] check) {
-    int blen = base.length;
-    int clen = check.length;
-    if (capacity < size || size < 0 || blen != clen) {
-      throw new IllegalArgumentException("Illegal init parameters");
-    }
-    this.base = new int[capacity];
-    this.check = new int[capacity];
-    this.capacity = capacity;
-    System.arraycopy(base, 0, this.base, 0, blen);
-    System.arraycopy(check, 0, this.check, 0, clen);
-    this.size = size;
-  }
-
-  public void clear() {
-    base = null;
-    check = null;
-    size = 0;
-    capacity = 0;
-  }
-
-  private int reSize(int newCapacity) {
-    if (newCapacity < capacity) {
-      return capacity;
-    }
-    int[] newBase = new int[newCapacity];
-    int[] newCheck = new int[newCapacity];
-    if (capacity > 0) {
-      System.arraycopy(base, 0, newBase, 0, capacity);
-      System.arraycopy(check, 0, newCheck, 0, capacity);
-    }
-    base = newBase;
-    check = newCheck;
-    capacity = newCapacity;
-    return capacity;
-  }
-
-  public int getSize() {
-    return size;
-  }
-
-  public int getCapacity() {
-    return capacity;
-  }
-
-  /**
-   * Get encode value of key
-   *
-   * @param key
-   * @return
-   */
-  public int getValue(String key) {
-    String k = key + '\0';
-    byte[] bKeys = k.getBytes();
-    return getValue(bKeys);
-  }
-
-  /**
-   * Get encode value of bKeys
-   *
-   * @param bKeys
-   * @return
-   */
-  private int getValue(byte[] bKeys) {
-    int from = 1;
-    int to;
-    int current;
-    int len = bKeys.length;
-    if (size == 0) return -1;
-    for (int i = 0; i < len; i++) {
-      current = bKeys[i] & 0xFF;
-      to = base[from] + current;
-      if (check[to] != from) return -1;
-      int baseValue = base[to];
-      if (baseValue <= -ENCODE_BASE_VALUE) {
-        if (i == len - 1) {
-          return -1 * baseValue;
-        } else {
-          return -1;
-        }
-      }
-      from = to;
-
-    }
-    return -1;
-  }
-
-  /**
-   * Get all children of one node
-   *
-   * @param pos
-   * @return
-   */
-  private TreeSet<Integer> getChildren(int pos) {
-    TreeSet<Integer> children = new TreeSet<Integer>();
-    for (int i = 0; i < 0xFF; i++) {
-      int cpos = base[pos] + i;
-      if (cpos >= size) break;
-      if (cpos < 0) {
-        return null;
-      }
-      if (check[cpos] == pos) {
-        children.add(new Integer(i));
-      }
-    }
-    return children;
-  }
-
-  /**
-   * @TODO: need to optimize performance
-   *
-   * Find multiple free position for {values}
-   * the distance between free position should be as same as {values}
-   *
-   * @param values
-   * @return
-   */
-  private int findFreeRoom(SortedSet<Integer> values) {
-    int min = values.first();
-    int max = values.last();
-    for (int i = min + 1; i < capacity; i++) {
-      if (i + max >= capacity) {
-        reSize(capacity + values.size());
-      }
-      int res = 0;
-      for (Integer v : values) {
-        res = res | base[v - min + i];
-      }
-      if (res == EPTY_BACK_VALUE) return i - min;
-    }
-    return -1;
-  }
-
-  /**
-   * Find one empty position for value
-   *
-   * @param value
-   * @return
-   */
-  private int findAvailableHop(int value) {
-    reSize(size + 1);
-    int result = size - 1;
-    for (int i = value + 1; i < capacity; i++) {
-      if (base[i] == EPTY_BACK_VALUE) {
-        result = i - value;
-        break;
-      }
-    }
-    return result;
-  }
-
-  /**
-   * Resolve when conflict and reset current node and its children.
-   *
-   * @param start current conflict position
-   * @param bKey current byte value which for processing
-   * @return
-   */
-  private int conflict(int start, int bKey) {
-    int from = start;
-    int newKey = bKey;
-    TreeSet<Integer> children = getChildren(from);
-    children.add(new Integer(newKey));
-    int newBasePos = findFreeRoom(children);
-    children.remove(new Integer(newKey));
-
-    int oldBasePos = base[start];
-    base[start] = newBasePos;
-
-    int oldPos, newPos;
-    for (Integer child : children) {
-      oldPos = oldBasePos + child;
-      newPos = newBasePos + child;
-      if (oldPos == from) from = newPos;
-      base[newPos] = base[oldPos];
-      check[newPos] = check[oldPos];
-      if (newPos >= size) size = newPos + 1;
-      if (base[oldPos] > 0) {
-        TreeSet<Integer> cs = getChildren(oldPos);
-        for (Integer c : cs) {
-          check[base[oldPos] + c] = newPos;
-        }
-      }
-      base[oldPos] = EPTY_BACK_VALUE;
-      check[oldPos] = EPTY_BACK_VALUE;
-    }
-    return from;
-  }
-
-  /**
-   * Insert element (byte[]) into DAT.
-   * 1. if the element has been DAT then return.
-   * 2. if position which is empty then insert directly.
-   * 3. if conflict then resolve it.
-   *
-   * @param bKeys
-   * @return
-   */
-  private boolean insert(byte[] bKeys) {
-    int from = 1;
-    int klen = bKeys.length;
-    for (int i = 0; i < klen; i++) {
-      int c = bKeys[i] & 0xFF;
-      int to = base[from] + c;
-      reSize((int) (to * 1.2) + 1);
-      if (check[to] == from) {
-        if (i == klen - 1) return true;
-        from = to;
-      } else if (check[to] == EPTY_BACK_VALUE) {
-        check[to] = from;
-        if (i == klen - 1) {
-          base[to] = -id;
-          id = id + 1;
-          return true;
-        } else {
-          int next = bKeys[i + 1] & 0xFF;
-          base[to] = findAvailableHop(next);
-          from = to;
-        }
-        if (to >= size) size = to + 1;
-      } else {
-        int rConflict = conflict(from, c);
-        int locate = base[rConflict] + c;
-        if (check[locate] != EPTY_BACK_VALUE) {
-          System.err.println("conflict");
-        }
-        check[locate] = rConflict;
-        if (i == klen - 1) {
-          base[locate] = -id;
-          id = id + 1;
-        } else {
-          int nah = bKeys[i + 1] & 0xFF;
-          base[locate] = findAvailableHop(nah);
-        }
-        if (locate >= size) size = locate + 1;
-        from = locate;
-        if (i == klen - 1) {
-          return true;
-        }
-      }
-    }
-    return false;
-  }
-
-  /**
-   * Insert element (String) into DAT, the element will be transformed to
-   * byte[] firstly then insert into DAT.
-   *
-   * @param key
-   * @return
-   */
-  public boolean insert(String key) {
-    String k = key + '\0';
-    byte[] bKeys = k.getBytes();
-    if (!insert(bKeys)) {
-      return false;
-    }
-    return true;
-  }
-
-  /**
-   * Serialize the DAT to data output stream
-   *
-   * @param out
-   * @throws IOException
-   */
-  public void write(DataOutputStream out) throws IOException {
-    out.write(HEAD_MAGIC);
-    out.writeInt(capacity);
-    out.writeInt(size);
-    for (int i = 0; i < size; i++) {
-      out.writeInt(base[i]);
-    }
-    for (int i = 0; i < size; i++) {
-      out.writeInt(check[i]);
-    }
-  }
-
-  /**
-   * Deserialize the DAT from data input stream
-   *
-   * @param in
-   * @throws IOException
-   */
-  public void read(DataInputStream in) throws IOException {
-    byte[] header = new byte[HEAD_LEN];
-    in.read(header);
-    int comp = 0;
-    for (int i = 0; i < HEAD_LEN; i++) {
-      comp = HEAD_MAGIC[i] - header[i];
-      if (comp != 0) break;
-    }
-    if (comp != 0) throw new IllegalArgumentException("Illegal file type");
-    int capacity = in.readInt();
-    int size = in.readInt();
-    if (capacity < size || size < 0) throw new IllegalArgumentException("Illegal parameters");
-    int[] base = new int[size];
-    int[] check = new int[size];
-    for (int i = 0; i < size; i++) {
-      base[i] = in.readInt();
-    }
-    for (int i = 0; i < size; i++) {
-      check[i] = in.readInt();
-    }
-    init(capacity, size, base, check);
-  }
-
-  /**
-   * Dump double array value about Trie
-   */
-  public void dump(PrintStream out) {
-    out.println("Capacity = " + capacity + ", Size = " + size);
-    for (int i = 0; i < size; i++) {
-      if (base[i] != EPTY_BACK_VALUE) {
-        out.print(i + ":[" + base[i] + "," + check[i] + "], ");
-      }
-    }
-    out.println();
-  }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
index ff30c73..2dadfe4 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ForwardDictionaryCache.java
@@ -19,6 +19,7 @@
 
 package org.apache.carbondata.core.cache.dictionary;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.Callable;
@@ -31,7 +32,6 @@ import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.cache.CacheType;
 import org.apache.carbondata.core.cache.CarbonLRUCache;
-import org.apache.carbondata.core.util.CarbonUtilException;
 
 /**
  * This class implements methods to create dictionary cache which will hold
@@ -62,10 +62,10 @@ public class ForwardDictionaryCache<K extends DictionaryColumnUniqueIdentifier,
    * @param dictionaryColumnUniqueIdentifier unique identifier which contains dbName,
    *                                         tableName and columnIdentifier
    * @return dictionary
-   * @throws CarbonUtilException in case memory is not sufficient to load dictionary into memory
+   * @throws IOException in case memory is not sufficient to load dictionary into memory
    */
   @Override public Dictionary get(DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier)
-      throws CarbonUtilException {
+      throws IOException {
     return getDictionary(dictionaryColumnUniqueIdentifier);
   }
 
@@ -76,11 +76,11 @@ public class ForwardDictionaryCache<K extends DictionaryColumnUniqueIdentifier,
    * @param dictionaryColumnUniqueIdentifiers unique identifier which contains dbName,
    *                                          tableName and columnIdentifier
    * @return list of dictionary
-   * @throws CarbonUtilException in case memory is not sufficient to load dictionary into memory
+   * @throws IOException in case memory is not sufficient to load dictionary into memory
    */
   @Override public List<Dictionary> getAll(
       List<DictionaryColumnUniqueIdentifier> dictionaryColumnUniqueIdentifiers)
-      throws CarbonUtilException {
+      throws IOException {
     boolean exceptionOccurredInDictionaryLoading = false;
     String exceptionMessage = "";
     List<Dictionary> forwardDictionaryObjectList =
@@ -90,7 +90,7 @@ public class ForwardDictionaryCache<K extends DictionaryColumnUniqueIdentifier,
     ExecutorService executorService = Executors.newFixedThreadPool(thread_pool_size);
     for (final DictionaryColumnUniqueIdentifier uniqueIdent : dictionaryColumnUniqueIdentifiers) {
       taskSubmitList.add(executorService.submit(new Callable<Dictionary>() {
-        @Override public Dictionary call() throws CarbonUtilException {
+        @Override public Dictionary call() throws IOException {
           Dictionary dictionary = getDictionary(uniqueIdent);
           return dictionary;
         }
@@ -114,7 +114,7 @@ public class ForwardDictionaryCache<K extends DictionaryColumnUniqueIdentifier,
     if (exceptionOccurredInDictionaryLoading) {
       clearDictionary(forwardDictionaryObjectList);
       LOGGER.error(exceptionMessage);
-      throw new CarbonUtilException(exceptionMessage);
+      throw new IOException(exceptionMessage);
     }
     return forwardDictionaryObjectList;
   }
@@ -160,11 +160,11 @@ public class ForwardDictionaryCache<K extends DictionaryColumnUniqueIdentifier,
    * @param dictionaryColumnUniqueIdentifier unique identifier which contains dbName,
    *                                         tableName and columnIdentifier
    * @return dictionary
-   * @throws CarbonUtilException in case memory is not sufficient to load dictionary into memory
+   * @throws IOException in case memory is not sufficient to load dictionary into memory
    */
   private Dictionary getDictionary(
       DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier)
-      throws CarbonUtilException {
+      throws IOException {
     Dictionary forwardDictionary = null;
     // dictionary is only for primitive data type
     assert (!dictionaryColumnUniqueIdentifier.getDataType().isComplexType());

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionaryCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionaryCache.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionaryCache.java
index fab767f..435f5a7 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionaryCache.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ReverseDictionaryCache.java
@@ -19,6 +19,7 @@
 
 package org.apache.carbondata.core.cache.dictionary;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.Callable;
@@ -31,7 +32,6 @@ import org.apache.carbondata.common.logging.LogService;
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.cache.CacheType;
 import org.apache.carbondata.core.cache.CarbonLRUCache;
-import org.apache.carbondata.core.util.CarbonUtilException;
 
 /**
  * This class implements methods to create dictionary cache which will hold
@@ -62,10 +62,10 @@ public class ReverseDictionaryCache<K extends DictionaryColumnUniqueIdentifier,
    * @param dictionaryColumnUniqueIdentifier unique identifier which contains dbName,
    *                                         tableName and columnIdentifier
    * @return dictionary
-   * @throws CarbonUtilException in case memory is not sufficient to load dictionary into memory
+   * @throws IOException in case memory is not sufficient to load dictionary into memory
    */
   @Override public Dictionary get(DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier)
-      throws CarbonUtilException {
+      throws IOException {
     return getDictionary(dictionaryColumnUniqueIdentifier);
   }
 
@@ -76,11 +76,11 @@ public class ReverseDictionaryCache<K extends DictionaryColumnUniqueIdentifier,
    * @param dictionaryColumnUniqueIdentifiers unique identifier which contains dbName,
    *                                          tableName and columnIdentifier
    * @return list of dictionary
-   * @throws CarbonUtilException in case memory is not sufficient to load dictionary into memory
+   * @throws IOException in case memory is not sufficient to load dictionary into memory
    */
   @Override public List<Dictionary> getAll(
       List<DictionaryColumnUniqueIdentifier> dictionaryColumnUniqueIdentifiers)
-      throws CarbonUtilException {
+      throws IOException {
     boolean exceptionOccurredInDictionaryLoading = false;
     String exceptionMessage = "";
     List<Dictionary> reverseDictionaryObjectList =
@@ -90,7 +90,7 @@ public class ReverseDictionaryCache<K extends DictionaryColumnUniqueIdentifier,
     ExecutorService executorService = Executors.newFixedThreadPool(thread_pool_size);
     for (final DictionaryColumnUniqueIdentifier uniqueIdent : dictionaryColumnUniqueIdentifiers) {
       taskSubmitList.add(executorService.submit(new Callable<Dictionary>() {
-        @Override public Dictionary call() throws CarbonUtilException {
+        @Override public Dictionary call() throws IOException {
           Dictionary dictionary = getDictionary(uniqueIdent);
           return dictionary;
         }
@@ -114,7 +114,7 @@ public class ReverseDictionaryCache<K extends DictionaryColumnUniqueIdentifier,
     if (exceptionOccurredInDictionaryLoading) {
       clearDictionary(reverseDictionaryObjectList);
       LOGGER.error(exceptionMessage);
-      throw new CarbonUtilException(exceptionMessage);
+      throw new IOException(exceptionMessage);
     }
     return reverseDictionaryObjectList;
   }
@@ -161,11 +161,11 @@ public class ReverseDictionaryCache<K extends DictionaryColumnUniqueIdentifier,
    * @param dictionaryColumnUniqueIdentifier unique identifier which contains dbName,
    *                                         tableName and columnIdentifier
    * @return dictionary
-   * @throws CarbonUtilException in case memory is not sufficient to load dictionary into memory
+   * @throws IOException in case memory is not sufficient to load dictionary into memory
    */
   private Dictionary getDictionary(
       DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier)
-      throws CarbonUtilException {
+      throws IOException {
     Dictionary reverseDictionary = null;
     // dictionary is only for primitive data type
     assert(!dictionaryColumnUniqueIdentifier.getDataType().isComplexType());

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/CarbonDataLoadSchema.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/CarbonDataLoadSchema.java b/core/src/main/java/org/apache/carbondata/core/carbon/CarbonDataLoadSchema.java
index 7cfefc9..45b2bcd 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/CarbonDataLoadSchema.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/CarbonDataLoadSchema.java
@@ -65,15 +65,6 @@ public class CarbonDataLoadSchema implements Serializable {
   }
 
   /**
-   * set dimensionrelation list
-   *
-   * @param dimensionRelationList
-   */
-  public void setDimensionRelationList(List<DimensionRelation> dimensionRelationList) {
-    this.dimensionRelationList = dimensionRelationList;
-  }
-
-  /**
    * get carbontable
    *
    * @return carbonTable
@@ -98,11 +89,6 @@ public class CarbonDataLoadSchema implements Serializable {
     private String tableName;
 
     /**
-     * dimensionSource csv path
-     */
-    private String dimensionSource;
-
-    /**
      * relation with fact and dimension table
      */
     private Relation relation;
@@ -118,14 +104,12 @@ public class CarbonDataLoadSchema implements Serializable {
      * constructor
      *
      * @param tableName       - dimension table name
-     * @param dimensionSource - source file path
      * @param relation        - fact foreign key with dimension primary key mapping
      * @param columns         - list of columns to be used from this dimension table
      */
-    public DimensionRelation(String tableName, String dimensionSource, Relation relation,
+    public DimensionRelation(String tableName, Relation relation,
         List<String> columns) {
       this.tableName = tableName;
-      this.dimensionSource = dimensionSource;
       this.relation = relation;
       this.columns = columns;
     }
@@ -138,13 +122,6 @@ public class CarbonDataLoadSchema implements Serializable {
     }
 
     /**
-     * @return dimensionSource
-     */
-    public String getDimensionSource() {
-      return dimensionSource;
-    }
-
-    /**
      * @return relation
      */
     public Relation getRelation() {

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/AbstractBlockIndexStoreCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/AbstractBlockIndexStoreCache.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/AbstractBlockIndexStoreCache.java
index c700a06..d676c02 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/AbstractBlockIndexStoreCache.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/AbstractBlockIndexStoreCache.java
@@ -19,7 +19,8 @@
 
 package org.apache.carbondata.core.carbon.datastore;
 
-import java.util.Arrays;
+import java.io.IOException;
+import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.ConcurrentHashMap;
@@ -30,9 +31,9 @@ import org.apache.carbondata.core.carbon.datastore.block.AbstractIndex;
 import org.apache.carbondata.core.carbon.datastore.block.BlockInfo;
 import org.apache.carbondata.core.carbon.datastore.block.TableBlockInfo;
 import org.apache.carbondata.core.carbon.datastore.block.TableBlockUniqueIdentifier;
+import org.apache.carbondata.core.carbon.datastore.exception.IndexBuilderException;
 import org.apache.carbondata.core.carbon.metadata.blocklet.DataFileFooter;
 import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.util.CarbonUtilException;
 
 /**
  * This class validate and load the B-Tree in the executor lru cache
@@ -88,7 +89,7 @@ public abstract class AbstractBlockIndexStoreCache<K, V>
    */
   protected void checkAndLoadTableBlocks(AbstractIndex tableBlock,
       TableBlockUniqueIdentifier tableBlockUniqueIdentifier, String lruCacheKey)
-      throws CarbonUtilException {
+      throws IOException {
     // calculate the required size is
     TableBlockInfo blockInfo = tableBlockUniqueIdentifier.getTableBlockInfo();
     long requiredMetaSize = CarbonUtil
@@ -103,13 +104,12 @@ public abstract class AbstractBlockIndexStoreCache<K, V>
       if (isTableBlockAddedToLruCache) {
         // load table blocks data
         // getting the data file meta data of the block
-        DataFileFooter footer = CarbonUtil
-            .readMetadatFile(blockInfo);
+        DataFileFooter footer = CarbonUtil.readMetadatFile(blockInfo);
         footer.setBlockInfo(new BlockInfo(blockInfo));
         // building the block
-        tableBlock.buildIndex(Arrays.asList(footer));
+        tableBlock.buildIndex(Collections.singletonList(footer));
       } else {
-        throw new CarbonUtilException(
+        throw new IndexBuilderException(
             "Cannot load table blocks into memory. Not enough memory available");
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BlockIndexStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BlockIndexStore.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BlockIndexStore.java
index a452338..e431805 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BlockIndexStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BlockIndexStore.java
@@ -19,6 +19,7 @@
 
 package org.apache.carbondata.core.carbon.datastore;
 
+import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
@@ -43,7 +44,6 @@ import org.apache.carbondata.core.carbon.datastore.block.TableBlockUniqueIdentif
 import org.apache.carbondata.core.carbon.datastore.exception.IndexBuilderException;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.core.util.CarbonUtilException;
 
 /**
  * This class is used to load the B-Tree in Executor LRU Cache
@@ -64,10 +64,9 @@ public class BlockIndexStore<K, V> extends AbstractBlockIndexStoreCache<K, V> {
    *
    * @param tableBlockUniqueIdentifier Uniquely identifies the block
    * @return returns the blocks B-Tree meta
-   * @throws CarbonUtilException
    */
   @Override public AbstractIndex get(TableBlockUniqueIdentifier tableBlockUniqueIdentifier)
-      throws CarbonUtilException {
+      throws IOException {
     TableBlockInfo tableBlockInfo = tableBlockUniqueIdentifier.getTableBlockInfo();
     BlockInfo blockInfo = new BlockInfo(tableBlockInfo);
     String lruCacheKey =
@@ -158,10 +157,10 @@ public class BlockIndexStore<K, V> extends AbstractBlockIndexStoreCache<K, V> {
    *
    * @param tableBlocksInfos List of unique table blocks
    * @return List<AbstractIndex>
-   * @throws CarbonUtilException
+   * @throws IndexBuilderException
    */
   @Override public List<AbstractIndex> getAll(List<TableBlockUniqueIdentifier> tableBlocksInfos)
-      throws CarbonUtilException {
+      throws IndexBuilderException {
     AbstractIndex[] loadedBlock = new AbstractIndex[tableBlocksInfos.size()];
     int numberOfCores = 1;
     try {
@@ -181,8 +180,7 @@ public class BlockIndexStore<K, V> extends AbstractBlockIndexStoreCache<K, V> {
     try {
       executor.awaitTermination(1, TimeUnit.HOURS);
     } catch (InterruptedException e) {
-      IndexBuilderException indexBuilderException = new IndexBuilderException(e);
-      throw new CarbonUtilException(indexBuilderException.getMessage(), indexBuilderException);
+      throw new IndexBuilderException(e);
     }
     // fill the block which were not loaded before to loaded blocks array
     fillLoadedBlocks(loadedBlock, blocksList);
@@ -241,10 +239,10 @@ public class BlockIndexStore<K, V> extends AbstractBlockIndexStoreCache<K, V> {
    *
    * @param loadedBlockArray array of blocks which will be filled
    * @param blocksList       blocks loaded in thread
-   * @throws CarbonUtilException in case of any failure
+   * @throws IndexBuilderException in case of any failure
    */
   private void fillLoadedBlocks(AbstractIndex[] loadedBlockArray,
-      List<Future<AbstractIndex>> blocksList) throws CarbonUtilException {
+      List<Future<AbstractIndex>> blocksList) throws IndexBuilderException {
     int blockCounter = 0;
     boolean exceptionOccurred = false;
     Throwable exceptionRef = null;
@@ -260,7 +258,7 @@ public class BlockIndexStore<K, V> extends AbstractBlockIndexStoreCache<K, V> {
       LOGGER.error("Block B-Tree loading failed. Clearing the access count of the loaded blocks.");
       // in case of any failure clear the access count for the valid loaded blocks
       clearAccessCountForLoadedBlocks(loadedBlockArray);
-      throw new CarbonUtilException("Block B-tree loading failed", exceptionRef);
+      throw new IndexBuilderException("Block B-tree loading failed", exceptionRef);
     }
   }
 
@@ -295,7 +293,7 @@ public class BlockIndexStore<K, V> extends AbstractBlockIndexStoreCache<K, V> {
   }
 
   private AbstractIndex loadBlock(TableBlockUniqueIdentifier tableBlockUniqueIdentifier)
-      throws CarbonUtilException {
+      throws IOException {
     AbstractIndex tableBlock = new BlockIndex();
     BlockInfo blockInfo = new BlockInfo(tableBlockUniqueIdentifier.getTableBlockInfo());
     String lruCacheKey =

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNode.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNode.java
index 0ddd8c5..28131d6 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNode.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNode.java
@@ -18,6 +18,8 @@
  */
 package org.apache.carbondata.core.carbon.datastore;
 
+import java.io.IOException;
+
 import org.apache.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
 import org.apache.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
 import org.apache.carbondata.core.datastorage.store.FileHolder;
@@ -55,7 +57,6 @@ public interface DataRefNode {
    * This method will be used to get the max value of all the columns this can
    * be used in case of filter query
    *
-   * @param max value of all the columns
    */
   byte[][] getColumnsMaxValue();
 
@@ -63,7 +64,6 @@ public interface DataRefNode {
    * This method will be used to get the min value of all the columns this can
    * be used in case of filter query
    *
-   * @param min value of all the columns
    */
   byte[][] getColumnsMinValue();
 
@@ -79,16 +79,17 @@ public interface DataRefNode {
    *                     data in On IO
    * @return dimension data chunks
    */
-  DimensionColumnDataChunk[] getDimensionChunks(FileHolder fileReader, int[][] blockIndexes);
+  DimensionColumnDataChunk[] getDimensionChunks(FileHolder fileReader, int[][] blockIndexes)
+      throws IOException;
 
   /**
    * Below method will be used to get the dimension chunk
    *
    * @param fileReader file reader to read the chunk from file
-   * @param blockIndex block index to be read
    * @return dimension data chunk
    */
-  DimensionColumnDataChunk getDimensionChunk(FileHolder fileReader, int blockIndexes);
+  DimensionColumnDataChunk getDimensionChunk(FileHolder fileReader, int blockIndexes)
+      throws IOException;
 
   /**
    * Below method will be used to get the measure chunk
@@ -102,7 +103,8 @@ public interface DataRefNode {
    *                     data in On IO
    * @return measure column data chunk
    */
-  MeasureColumnDataChunk[] getMeasureChunks(FileHolder fileReader, int[][] blockIndexes);
+  MeasureColumnDataChunk[] getMeasureChunks(FileHolder fileReader, int[][] blockIndexes)
+      throws IOException;
 
   /**
    * Below method will be used to read the measure chunk
@@ -111,5 +113,5 @@ public interface DataRefNode {
    * @param blockIndex block index to be read from file
    * @return measure data chunk
    */
-  MeasureColumnDataChunk getMeasureChunk(FileHolder fileReader, int blockIndex);
+  MeasureColumnDataChunk getMeasureChunk(FileHolder fileReader, int blockIndex) throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNodeFinder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNodeFinder.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNodeFinder.java
index 78592f7..c8edc0f 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNodeFinder.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNodeFinder.java
@@ -28,7 +28,6 @@ public interface DataRefNodeFinder {
    * the search key
    *
    * @param dataBlocks complete data blocks present
-   * @param serachKey  key to be search
    * @return data block
    */
   DataRefNode findFirstDataBlock(DataRefNode dataBlocks, IndexKey searchKey);
@@ -38,7 +37,6 @@ public interface DataRefNodeFinder {
    * the search key
    *
    * @param dataBlocks complete data blocks present
-   * @param serachKey  key to be search
    * @return data block
    */
   DataRefNode findLastDataBlock(DataRefNode dataBlocks, IndexKey searchKey);

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/SegmentTaskIndexStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/SegmentTaskIndexStore.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/SegmentTaskIndexStore.java
index ef0ac62..eb8aea2 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/SegmentTaskIndexStore.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/SegmentTaskIndexStore.java
@@ -18,6 +18,7 @@
  */
 package org.apache.carbondata.core.carbon.datastore;
 
+import java.io.IOException;
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -40,7 +41,6 @@ import org.apache.carbondata.core.carbon.datastore.exception.IndexBuilderExcepti
 import org.apache.carbondata.core.carbon.metadata.blocklet.DataFileFooter;
 import org.apache.carbondata.core.carbon.path.CarbonTablePath.DataFileUtil;
 import org.apache.carbondata.core.util.CarbonUtil;
-import org.apache.carbondata.core.util.CarbonUtilException;
 
 /**
  * Class to handle loading, unloading,clearing,storing of the table
@@ -81,16 +81,11 @@ public class SegmentTaskIndexStore
 
   @Override
   public SegmentTaskIndexWrapper get(TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier)
-      throws CarbonUtilException {
-    SegmentTaskIndexWrapper segmentTaskIndexWrapper = null;
-    try {
-      segmentTaskIndexWrapper =
-          loadAndGetTaskIdToSegmentsMap(tableSegmentUniqueIdentifier.getSegmentToTableBlocksInfos(),
-              tableSegmentUniqueIdentifier.getAbsoluteTableIdentifier(),
-              tableSegmentUniqueIdentifier);
-    } catch (IndexBuilderException e) {
-      throw new CarbonUtilException(e.getMessage(), e);
-    }
+      throws IOException {
+    SegmentTaskIndexWrapper segmentTaskIndexWrapper =
+        loadAndGetTaskIdToSegmentsMap(tableSegmentUniqueIdentifier.getSegmentToTableBlocksInfos(),
+            tableSegmentUniqueIdentifier.getAbsoluteTableIdentifier(),
+            tableSegmentUniqueIdentifier);
     if (null != segmentTaskIndexWrapper) {
       segmentTaskIndexWrapper.incrementAccessCount();
     }
@@ -102,21 +97,21 @@ public class SegmentTaskIndexStore
    *
    * @param tableSegmentUniqueIdentifiers
    * @return
-   * @throws CarbonUtilException
+   * @throws IOException
    */
   @Override public List<SegmentTaskIndexWrapper> getAll(
-      List<TableSegmentUniqueIdentifier> tableSegmentUniqueIdentifiers) throws CarbonUtilException {
+      List<TableSegmentUniqueIdentifier> tableSegmentUniqueIdentifiers) throws IOException {
     List<SegmentTaskIndexWrapper> segmentTaskIndexWrappers =
         new ArrayList<>(tableSegmentUniqueIdentifiers.size());
     try {
       for (TableSegmentUniqueIdentifier segmentUniqueIdentifier : tableSegmentUniqueIdentifiers) {
         segmentTaskIndexWrappers.add(get(segmentUniqueIdentifier));
       }
-    } catch (CarbonUtilException e) {
+    } catch (IOException e) {
       for (SegmentTaskIndexWrapper segmentTaskIndexWrapper : segmentTaskIndexWrappers) {
         segmentTaskIndexWrapper.clear();
       }
-      throw new CarbonUtilException("Problem in loading segment blocks.", e);
+      throw e;
     }
     return segmentTaskIndexWrappers;
   }
@@ -155,13 +150,12 @@ public class SegmentTaskIndexStore
    * @param segmentToTableBlocksInfos segment id to block info
    * @param absoluteTableIdentifier   absolute table identifier
   * @return map of task id to segment mapping
-   * @throws IndexBuilderException
+   * @throws IOException
    */
   private SegmentTaskIndexWrapper loadAndGetTaskIdToSegmentsMap(
       Map<String, List<TableBlockInfo>> segmentToTableBlocksInfos,
       AbsoluteTableIdentifier absoluteTableIdentifier,
-      TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier)
-      throws IndexBuilderException, CarbonUtilException {
+      TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier) throws IOException {
     // task id to segment map
     Iterator<Map.Entry<String, List<TableBlockInfo>>> iteratorOverSegmentBlocksInfos =
         segmentToTableBlocksInfos.entrySet().iterator();
@@ -228,7 +222,7 @@ public class SegmentTaskIndexStore
       }
     } catch (IndexBuilderException e) {
       LOGGER.error("Problem while loading the segment");
-      throw new IndexBuilderException(e);
+      throw e;
     }
     return segmentTaskIndexWrapper;
   }
@@ -304,11 +298,11 @@ public class SegmentTaskIndexStore
    *
    * @param tableBlockInfoList
    * @return loaded segment
-   * @throws CarbonUtilException
+   * @throws IOException
    */
   private AbstractIndex loadBlocks(TaskBucketHolder taskBucketHolder,
       List<TableBlockInfo> tableBlockInfoList, AbsoluteTableIdentifier tableIdentifier)
-      throws CarbonUtilException {
+      throws IOException {
     // all the block of one task id will be loaded together
     // so creating a list which will have all the data file meta data to of one task
     List<DataFileFooter> footerList = CarbonUtil

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/AbstractIndex.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/AbstractIndex.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/AbstractIndex.java
index dd712fa..4dd76c6 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/AbstractIndex.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/AbstractIndex.java
@@ -53,13 +53,6 @@ public abstract class AbstractIndex implements Cacheable {
   protected long memorySize;
 
   /**
-   * @return the totalNumberOfRows
-   */
-  public long getTotalNumberOfRows() {
-    return totalNumberOfRows;
-  }
-
-  /**
    * @return the segmentProperties
    */
   public SegmentProperties getSegmentProperties() {

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/BlockIndex.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/BlockIndex.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/BlockIndex.java
index cfdb127..75ebb84 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/BlockIndex.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/BlockIndex.java
@@ -34,7 +34,6 @@ public class BlockIndex extends AbstractIndex {
   /**
    * Below method will be used to load the data block
    *
-   * @param blockInfo block detail
    */
   public void buildIndex(List<DataFileFooter> footerList) {
     // create a metadata details

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/SegmentProperties.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/SegmentProperties.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/SegmentProperties.java
index 05ad4e6..83d68ef 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/SegmentProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/SegmentProperties.java
@@ -160,7 +160,7 @@ public class SegmentProperties {
         new HashMap<Integer, Set<Integer>>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
     measuresOrdinalToBlockMapping =
         new HashMap<Integer, Integer>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
-    intialiseColGroups(columnsInTable);
+    intialiseColGroups();
     fillOrdinalToBlockMappingForDimension();
     fillOrdinalToBlockIndexMappingForMeasureColumns();
     fillColumnGroupAndItsCardinality(columnCardinality);
@@ -171,9 +171,8 @@ public class SegmentProperties {
    * it fills column groups
    * e.g {{1},{2,3,4},{5},{6},{7,8,9}}
    *
-   * @param columnsInTable
    */
-  private void intialiseColGroups(List<ColumnSchema> columnsInTable) {
+  private void intialiseColGroups() {
     // StringBuffer columnGroups = new StringBuffer();
     List<List<Integer>> colGrpList = new ArrayList<List<Integer>>();
     List<Integer> group = new ArrayList<Integer>();

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/SegmentTaskIndex.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/SegmentTaskIndex.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/SegmentTaskIndex.java
index ce7a63a..655226f 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/SegmentTaskIndex.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/SegmentTaskIndex.java
@@ -34,7 +34,6 @@ public class SegmentTaskIndex extends AbstractIndex {
   /**
    * Below method is store the blocks in some data structure
    *
-   * @param blockInfo block detail
    */
   public void buildIndex(List<DataFileFooter> footerList) {
     // create a metadata details

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/TableTaskInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/TableTaskInfo.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/TableTaskInfo.java
index 7ce3a14..da2cff8 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/TableTaskInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/block/TableTaskInfo.java
@@ -21,6 +21,7 @@ import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Set;
 import java.util.TreeMap;
 
@@ -95,7 +96,7 @@ public class TableTaskInfo implements Distributable {
       } else {
         // for the case where all the nodes have same number of blocks then
         // we need to return complete list instead of max node.
-        if (previousValueOccurence != entry.getValue()) {
+        if (!Objects.equals(previousValueOccurence, entry.getValue())) {
           useIndex = false;
         }
       }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/DimensionColumnDataChunk.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/DimensionColumnDataChunk.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/DimensionColumnDataChunk.java
index efa67e6..5bf200d 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/DimensionColumnDataChunk.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/DimensionColumnDataChunk.java
@@ -31,7 +31,6 @@ public interface DimensionColumnDataChunk<T> {
    *
    * @param data   data to filed
    * @param offset offset from which data need to be filed
-   * @param rowId  row id of the chunk
    * @return how many bytes was copied
    */
   int fillChunkData(byte[] data, int offset, int columnIndex, KeyStructureInfo restructuringInfo);
@@ -61,8 +60,6 @@ public interface DimensionColumnDataChunk<T> {
   /**
    * Below method to get  the data based in row id
    *
-   * @param row id
-   *            row id of the data
    * @return chunk
    */
   byte[] getChunkData(int columnIndex);

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/DimensionColumnChunkReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/DimensionColumnChunkReader.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/DimensionColumnChunkReader.java
index 0153211..4758da1 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/DimensionColumnChunkReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/DimensionColumnChunkReader.java
@@ -18,6 +18,8 @@
  */
 package org.apache.carbondata.core.carbon.datastore.chunk.reader;
 
+import java.io.IOException;
+
 import org.apache.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
 import org.apache.carbondata.core.datastorage.store.FileHolder;
 
@@ -35,7 +37,8 @@ public interface DimensionColumnChunkReader {
    * @param blockIndexes blocks to be read
    * @return dimension column chunks
    */
-  DimensionColumnDataChunk[] readDimensionChunks(FileHolder fileReader, int[][] blockIndexes);
+  DimensionColumnDataChunk[] readDimensionChunks(FileHolder fileReader, int[][] blockIndexes)
+      throws IOException;
 
   /**
    * Below method will be used to read the chunk based on block index
@@ -44,5 +47,6 @@ public interface DimensionColumnChunkReader {
    * @param blockIndex block to be read
    * @return dimension column chunk
    */
-  DimensionColumnDataChunk readDimensionChunk(FileHolder fileReader, int blockIndex);
+  DimensionColumnDataChunk readDimensionChunk(FileHolder fileReader, int blockIndex)
+      throws IOException;
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/MeasureColumnChunkReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/MeasureColumnChunkReader.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/MeasureColumnChunkReader.java
index 523a14e..9421ebf 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/MeasureColumnChunkReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/MeasureColumnChunkReader.java
@@ -18,6 +18,8 @@
  */
 package org.apache.carbondata.core.carbon.datastore.chunk.reader;
 
+import java.io.IOException;
+
 import org.apache.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
 import org.apache.carbondata.core.datastorage.store.FileHolder;
 
@@ -33,7 +35,8 @@ public interface MeasureColumnChunkReader {
    * @param blockIndexes blocks to be read
    * @return measure data chunks
    */
-  MeasureColumnDataChunk[] readMeasureChunks(FileHolder fileReader, int[][] blockIndexes);
+  MeasureColumnDataChunk[] readMeasureChunks(FileHolder fileReader, int[][] blockIndexes)
+      throws IOException;
 
   /**
    * Method to read the blocks data based on block index
@@ -42,6 +45,6 @@ public interface MeasureColumnChunkReader {
    * @param blockIndex block to be read
    * @return measure data chunk
    */
-  MeasureColumnDataChunk readMeasureChunk(FileHolder fileReader, int blockIndex);
+  MeasureColumnDataChunk readMeasureChunk(FileHolder fileReader, int blockIndex) throws IOException;
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/AbstractChunkReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/AbstractChunkReader.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/AbstractChunkReader.java
index cfe7079..6638c96 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/AbstractChunkReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/AbstractChunkReader.java
@@ -67,7 +67,6 @@ public abstract class AbstractChunkReader implements DimensionColumnChunkReader
    * Constructor to get minimum parameter to create
    * instance of this class
    *
-   * @param dimensionColumnChunk dimension chunk metadata
    * @param eachColumnValueSize  size of the each column value
    * @param filePath             file from which data will be read
    */

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
index 00ad35f..abacbd4 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v1/CompressedDimensionChunkFileBasedReaderV1.java
@@ -18,6 +18,7 @@
  */
 package org.apache.carbondata.core.carbon.datastore.chunk.reader.dimension.v1;
 
+import java.io.IOException;
 import java.util.List;
 
 import org.apache.carbondata.core.carbon.datastore.chunk.DimensionChunkAttributes;
@@ -65,7 +66,7 @@ public class CompressedDimensionChunkFileBasedReaderV1 extends AbstractChunkRead
    * @return dimension column chunks
    */
   @Override public DimensionColumnDataChunk[] readDimensionChunks(FileHolder fileReader,
-      int[][] blockIndexes) {
+      int[][] blockIndexes) throws IOException {
     // read the column chunk based on block index and add
     DimensionColumnDataChunk[] dataChunks =
         new DimensionColumnDataChunk[dimensionColumnChunk.size()];
@@ -85,7 +86,7 @@ public class CompressedDimensionChunkFileBasedReaderV1 extends AbstractChunkRead
    * @return dimension column chunk
    */
   @Override public DimensionColumnDataChunk readDimensionChunk(FileHolder fileReader,
-      int blockIndex) {
+      int blockIndex) throws IOException {
     byte[] dataPage = null;
     int[] invertedIndexes = null;
     int[] invertedIndexesReverse = null;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
index 810fe08..7843d05 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/dimension/v2/CompressedDimensionChunkFileBasedReaderV2.java
@@ -18,6 +18,7 @@
  */
 package org.apache.carbondata.core.carbon.datastore.chunk.reader.dimension.v2;
 
+import java.io.IOException;
 import java.util.List;
 
 import org.apache.carbondata.core.carbon.datastore.chunk.DimensionChunkAttributes;
@@ -77,7 +78,7 @@ public class CompressedDimensionChunkFileBasedReaderV2 extends AbstractChunkRead
    * @return dimension column chunks
    */
   @Override public DimensionColumnDataChunk[] readDimensionChunks(final FileHolder fileReader,
-      final int[][] blockIndexes) {
+      final int[][] blockIndexes) throws IOException {
     // read the column chunk based on block index and add
     DimensionColumnDataChunk[] dataChunks =
         new DimensionColumnDataChunk[dimensionChunksOffset.size()];
@@ -122,7 +123,7 @@ public class CompressedDimensionChunkFileBasedReaderV2 extends AbstractChunkRead
    * @return dimension column chunk
    */
   @Override public DimensionColumnDataChunk readDimensionChunk(FileHolder fileReader,
-      int blockIndex) {
+      int blockIndex) throws IOException {
     byte[] dataPage = null;
     int[] invertedIndexes = null;
     int[] invertedIndexesReverse = null;
@@ -217,7 +218,7 @@ public class CompressedDimensionChunkFileBasedReaderV2 extends AbstractChunkRead
    * @return dimension column chunk array
    */
   private DimensionColumnDataChunk[] readDimensionChunksInGroup(FileHolder fileReader,
-      int startBlockIndex, int endBlockIndex) {
+      int startBlockIndex, int endBlockIndex) throws IOException {
     long currentDimensionOffset = dimensionChunksOffset.get(startBlockIndex);
     byte[] data = fileReader.readByteArray(filePath, currentDimensionOffset,
         (int) (dimensionChunksOffset.get(endBlockIndex + 1) - currentDimensionOffset));

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/AbstractMeasureChunkReader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/AbstractMeasureChunkReader.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/AbstractMeasureChunkReader.java
index 6c74379..0a44336 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/AbstractMeasureChunkReader.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/AbstractMeasureChunkReader.java
@@ -33,9 +33,6 @@ public abstract class AbstractMeasureChunkReader implements MeasureColumnChunkRe
   /**
    * Constructor to get minimum parameter to create instance of this class
    *
-   * @param measureColumnChunk measure chunk metadata
-   * @param compression        model metadata which was to used to compress and uncompress
-   *                           the measure value
    * @param filePath           file from which data will be read
    */
   public AbstractMeasureChunkReader(String filePath) {

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaadc88a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
index 8bd9245..3dd0d44 100644
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
+++ b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/chunk/reader/measure/v1/CompressedMeasureChunkFileBasedReaderV1.java
@@ -18,6 +18,7 @@
  */
 package org.apache.carbondata.core.carbon.datastore.chunk.reader.measure.v1;
 
+import java.io.IOException;
 import java.util.List;
 
 import org.apache.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
@@ -27,8 +28,6 @@ import org.apache.carbondata.core.carbon.metadata.blocklet.datachunk.DataChunk;
 import org.apache.carbondata.core.datastorage.store.FileHolder;
 import org.apache.carbondata.core.datastorage.store.compression.ReaderCompressModel;
 import org.apache.carbondata.core.datastorage.store.compression.ValueCompressonHolder.UnCompressValue;
-
-
 import org.apache.carbondata.core.datastorage.store.dataholder.CarbonReadDataHolder;
 import org.apache.carbondata.core.metadata.ValueEncoderMeta;
 import org.apache.carbondata.core.util.ValueCompressionUtil;
@@ -63,7 +62,7 @@ public class CompressedMeasureChunkFileBasedReaderV1 extends AbstractMeasureChun
    * @return measure data chunks
    */
   @Override public MeasureColumnDataChunk[] readMeasureChunks(final FileHolder fileReader,
-      final int[][] blockIndexes) {
+      final int[][] blockIndexes) throws IOException {
     MeasureColumnDataChunk[] datChunk = new MeasureColumnDataChunk[measureColumnChunks.size()];
     for (int i = 0; i < blockIndexes.length; i++) {
       for (int j = blockIndexes[i][0]; j <= blockIndexes[i][1]; j++) {
@@ -81,7 +80,7 @@ public class CompressedMeasureChunkFileBasedReaderV1 extends AbstractMeasureChun
    * @return measure data chunk
    */
   @Override public MeasureColumnDataChunk readMeasureChunk(final FileHolder fileReader,
-      final int blockIndex) {
+      final int blockIndex) throws IOException {
     ValueEncoderMeta meta = measureColumnChunks.get(blockIndex).getValueEncoderMeta().get(0);
     ReaderCompressModel compressModel = ValueCompressionUtil.getReaderCompressModel(meta);
     UnCompressValue values = compressModel.getUnCompressValues().getNew().getCompressorObject();


Mime
View raw message