carbondata-commits mailing list archives

From: chenliang...@apache.org
Subject: [47/50] [abbrv] incubator-carbondata git commit: Merge remote-tracking branch 'carbon_master/master' into apache/master
Date: Wed, 20 Jul 2016 10:14:15 GMT
Merge remote-tracking branch 'carbon_master/master' into apache/master

Conflicts:
	README.md
	core/src/main/java/org/carbondata/core/carbon/path/CarbonTablePath.java
	core/src/main/java/org/carbondata/core/constants/CarbonCommonConstants.java
	core/src/main/java/org/carbondata/core/datastorage/store/columnar/BlockIndexerStorageForNoInvertedIndex.java
	core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
	core/src/main/java/org/carbondata/query/aggregator/impl/AvgLongAggregator.java
	core/src/main/java/org/carbondata/query/aggregator/util/MeasureAggregatorFactory.java
	core/src/main/java/org/carbondata/query/carbon/aggregator/dimension/impl/ColumnGroupDimensionsAggregator.java
	core/src/main/java/org/carbondata/query/carbon/aggregator/dimension/impl/FixedLengthDimensionAggregator.java
	core/src/main/java/org/carbondata/query/carbon/aggregator/dimension/impl/VariableLengthDimensionAggregator.java
	core/src/main/java/org/carbondata/query/carbon/aggregator/expression/ExpressionAggregator.java
	core/src/main/java/org/carbondata/query/carbon/aggregator/impl/ListBasedResultAggregator.java
	core/src/main/java/org/carbondata/query/carbon/aggregator/impl/MapBasedResultAggregator.java
	core/src/main/java/org/carbondata/query/carbon/executor/QueryExecutorFactory.java
	core/src/main/java/org/carbondata/query/carbon/executor/impl/AggregationQueryExecutor.java
	core/src/main/java/org/carbondata/query/carbon/executor/impl/CountStarQueryExecutor.java
	core/src/main/java/org/carbondata/query/carbon/executor/impl/DetailRawRecordQueryExecutor.java
	core/src/main/java/org/carbondata/query/carbon/executor/impl/DetailWithOrderByQueryExecutor.java
	core/src/main/java/org/carbondata/query/carbon/executor/impl/FunctionQueryExecutor.java
	core/src/main/java/org/carbondata/query/carbon/executor/internal/impl/InternalAbstractQueryExecutor.java
	core/src/main/java/org/carbondata/query/carbon/executor/internal/impl/InternalCountStartQueryExecutor.java
	core/src/main/java/org/carbondata/query/carbon/executor/internal/impl/InternalDetailQueryExecutor.java
	core/src/main/java/org/carbondata/query/carbon/merger/impl/SortedScannedResultMerger.java
	core/src/main/java/org/carbondata/query/carbon/result/BatchRawResult.java
	core/src/main/java/org/carbondata/query/carbon/result/ListBasedResultWrapper.java
	core/src/main/java/org/carbondata/query/carbon/result/impl/MapBasedResult.java
	core/src/main/java/org/carbondata/query/carbon/result/iterator/AbstractDetailQueryResultIterator.java
	core/src/main/java/org/carbondata/query/carbon/result/iterator/ChunkBasedResultIterator.java
	core/src/main/java/org/carbondata/query/carbon/result/iterator/DetailQueryResultIterator.java
	core/src/main/java/org/carbondata/query/carbon/result/iterator/DetailRawQueryResultIterator.java
	core/src/main/java/org/carbondata/query/carbon/result/preparator/QueryResultPreparator.java
	core/src/main/java/org/carbondata/query/carbon/result/preparator/impl/AbstractQueryResultPreparator.java
	core/src/main/java/org/carbondata/query/carbon/result/preparator/impl/QueryResultPreparatorImpl.java
	core/src/main/java/org/carbondata/query/carbon/result/preparator/impl/RawQueryResultPreparatorImpl.java
	core/src/main/java/org/carbondata/query/expression/DataType.java
	core/src/main/java/org/carbondata/scan/executor/impl/AbstractQueryExecutor.java
	core/src/main/java/org/carbondata/scan/executor/impl/DetailQueryExecutor.java
	core/src/main/java/org/carbondata/scan/executor/impl/QueryExecutorProperties.java
	core/src/main/java/org/carbondata/scan/executor/infos/AggregatorInfo.java
	core/src/main/java/org/carbondata/scan/executor/infos/BlockExecutionInfo.java
	core/src/main/java/org/carbondata/scan/executor/util/QueryUtil.java
	core/src/main/java/org/carbondata/scan/expression/ExpressionResult.java
	core/src/main/java/org/carbondata/scan/expression/conditional/EqualToExpression.java
	core/src/main/java/org/carbondata/scan/expression/conditional/InExpression.java
	core/src/main/java/org/carbondata/scan/expression/conditional/NotEqualsExpression.java
	core/src/main/java/org/carbondata/scan/expression/conditional/NotInExpression.java
	core/src/main/java/org/carbondata/scan/filter/FilterUtil.java
	core/src/main/java/org/carbondata/scan/filter/GenericQueryType.java
	core/src/main/java/org/carbondata/scan/filter/executer/RowLevelFilterExecuterImpl.java
	core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/DimColumnResolvedFilterInfo.java
	core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/CustomTypeDictionaryVisitor.java
	core/src/main/java/org/carbondata/scan/filter/resolver/resolverinfo/visitor/NoDictionaryTypeVisitor.java
	core/src/main/java/org/carbondata/scan/model/QueryModel.java
	core/src/main/java/org/carbondata/scan/result/AbstractScannedResult.java
	core/src/main/java/org/carbondata/scan/result/BatchResult.java
	core/src/main/java/org/carbondata/scan/result/Result.java
	core/src/main/java/org/carbondata/scan/result/impl/ListBasedResult.java
	core/src/main/java/org/carbondata/scan/result/iterator/ChunkRowIterator.java
	hadoop/src/main/java/org/carbondata/hadoop/CarbonRecordReader.java
	integration/spark/src/main/java/org/carbondata/spark/load/CarbonLoaderUtil.java
	integration/spark/src/main/scala/org/apache/spark/sql/CarbonContext.scala
	integration/spark/src/main/scala/org/apache/spark/sql/CarbonDatasourceRelation.scala
	integration/spark/src/main/scala/org/apache/spark/sql/CarbonDictionaryDecoder.scala
	integration/spark/src/main/scala/org/apache/spark/sql/CarbonOperators.scala
	integration/spark/src/main/scala/org/apache/spark/sql/CarbonRawAggregate.scala
	integration/spark/src/main/scala/org/apache/spark/sql/CarbonRawOperators.scala
	integration/spark/src/main/scala/org/apache/spark/sql/CarbonSQLConf.scala
	integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
	integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
	integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonMetastoreCatalog.scala
	integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonRawStrategies.scala
	integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonStrategies.scala
	integration/spark/src/main/scala/org/apache/spark/sql/optimizer/CarbonOptimizer.scala
	integration/spark/src/main/scala/org/carbondata/spark/KeyVal.scala
	integration/spark/src/main/scala/org/carbondata/spark/agg/CarbonAggregates.scala
	integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonCleanFilesRDD.scala
	integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala
	integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDeleteLoadRDD.scala
	integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDropAggregateTableRDD.scala
	integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDropTableRDD.scala
	integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonGlobalDictionaryRDD.scala
	integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonRawQueryRDD.scala
	integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonScanRDD.scala
	integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
	integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataload/TestNoInvertedIndexLoadAndQuery.scala
	integration/spark/src/test/scala/org/carbondata/spark/testsuite/detailquery/AllDataTypesTestCase.scala
	integration/spark/src/test/scala/org/carbondata/spark/testsuite/detailquery/HighCardinalityDataTypesTestCase.scala
	integration/spark/src/test/scala/org/carbondata/spark/testsuite/filterexpr/AllDataTypesTestCaseFilter.scala
	integration/spark/src/test/scala/org/carbondata/spark/testsuite/joinquery/AllDataTypesTestCaseJoin.scala
	integration/spark/src/test/scala/org/carbondata/spark/testsuite/sortexpr/AllDataTypesTestCaseSort.scala
	integration/spark/src/test/scala/org/carbondata/spark/util/AutoHighCardinalityIdentifyTestCase.scala
	integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilTestCase.scala
	processing/src/main/java/org/carbondata/lcm/locks/LocalFileLock.java
	processing/src/main/java/org/carbondata/lcm/locks/ZooKeeperLocking.java
	processing/src/main/java/org/carbondata/lcm/status/SegmentStatusManager.java
	processing/src/main/java/org/carbondata/processing/csvreaderstep/CsvInput.java
	processing/src/main/java/org/carbondata/processing/store/CarbonFactDataHandlerColumnar.java


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/eaecb651
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/eaecb651
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/eaecb651

Branch: refs/heads/master
Commit: eaecb65129fc50c821f8b3cc43a1cab2046c8e3a
Parents: e5ed64b ad48c4c
Author: ravipesala <ravi.pesala@gmail.com>
Authored: Tue Jul 19 21:29:08 2016 +0530
Committer: ravipesala <ravi.pesala@gmail.com>
Committed: Tue Jul 19 21:29:08 2016 +0530

----------------------------------------------------------------------
 .../carbondata/common/logging/LogService.java   |   7 +
 .../common/logging/impl/StandardLogService.java |  18 +-
 .../common/logging/impl/StatisticLevel.java     |  64 ++
 .../cache/dictionary/ColumnDictionaryInfo.java  |   9 +-
 .../carbon/datastore/block/Distributable.java   |  25 +
 .../datastore/block/SegmentProperties.java      | 110 ++-
 .../carbon/datastore/block/TableBlockInfo.java  |  35 +-
 .../carbon/datastore/block/TableTaskInfo.java   | 114 +++
 .../core/carbon/metadata/CarbonMetadata.java    |  31 +
 .../metadata/schema/table/CarbonTable.java      |  29 +-
 .../schema/table/column/CarbonDimension.java    |  19 +-
 .../core/carbon/path/CarbonTablePath.java       |  26 +
 .../carbon/querystatistics/QueryStatistic.java  |  73 ++
 .../QueryStatisticsRecorder.java                |  74 ++
 .../core/constants/CarbonCommonConstants.java   |  18 +
 .../BlockIndexerStorageForNoInvertedIndex.java  |   2 +-
 .../TimeStampDirectDictionaryGenerator.java     |  58 +-
 .../CarbonDictionaryMetadataReaderImpl.java     |  28 +-
 .../core/reader/CarbonDictionaryReaderImpl.java |  32 +-
 .../CarbonDictionarySortIndexReaderImpl.java    |  25 +-
 .../core/util/CarbonLoadStatisticsDummy.java    | 114 +++
 .../core/util/CarbonLoadStatisticsImpl.java     | 426 +++++++++++
 .../core/util/CarbonTimeStatisticsFactory.java  |  52 ++
 .../core/util/DataFileFooterConverter.java      |  54 +-
 .../org/carbondata/core/util/DataTypeUtil.java  |  40 ++
 .../carbondata/core/util/LoadStatistics.java    |  67 ++
 .../core/writer/CarbonDictionaryWriter.java     |   3 +
 .../core/writer/CarbonDictionaryWriterImpl.java |  15 +-
 .../core/writer/CarbonFooterWriter.java         |  11 +-
 .../CarbonDictionarySortIndexWriterImpl.java    |  58 +-
 .../CarbonDictionarySortInfoPreparator.java     |  55 +-
 .../complex/querytypes/ArrayQueryType.java      | 158 ++++
 .../complex/querytypes/ComplexQueryType.java    |  79 ++
 .../complex/querytypes/PrimitiveQueryType.java  | 186 +++++
 .../complex/querytypes/StructQueryType.java     | 183 +++++
 .../executor/impl/AbstractQueryExecutor.java    |  58 +-
 .../executor/impl/QueryExecutorProperties.java  |  30 +-
 .../scan/executor/infos/BlockExecutionInfo.java |  52 ++
 .../scan/executor/util/QueryUtil.java           | 198 ++++-
 .../scan/executor/util/RestructureUtil.java     |  11 +-
 .../scan/expression/ExpressionResult.java       |  10 +-
 .../conditional/EqualToExpression.java          |  15 +-
 .../expression/conditional/InExpression.java    |  66 +-
 .../conditional/NotEqualsExpression.java        |  16 +-
 .../expression/conditional/NotInExpression.java |  68 +-
 .../scan/filter/FilterExpressionProcessor.java  |  12 +-
 .../org/carbondata/scan/filter/FilterUtil.java  |  75 +-
 .../scan/filter/GenericQueryType.java           |   7 +-
 .../executer/RowLevelFilterExecuterImpl.java    |  35 +-
 .../RowLevelRangeGrtThanFiterExecuterImpl.java  |   3 +-
 ...elRangeGrtrThanEquaToFilterExecuterImpl.java |   3 +-
 ...velRangeLessThanEqualFilterExecuterImpl.java |   3 +-
 .../RowLevelRangeLessThanFiterExecuterImpl.java |   3 +-
 .../DimColumnResolvedFilterInfo.java            |  11 -
 .../visitor/CustomTypeDictionaryVisitor.java    |  25 +-
 .../visitor/DictionaryColumnVisitor.java        |   7 +
 .../visitor/NoDictionaryTypeVisitor.java        |   8 +-
 .../org/carbondata/scan/model/QueryModel.java   |  56 +-
 .../scan/result/AbstractScannedResult.java      |  37 +-
 .../dictionary/AbstractDictionaryCacheTest.java |   1 +
 .../datastore/block/SegmentPropertiesTest.java  |   4 +-
 .../metadata/schema/table/CarbonTableTest.java  |   2 +-
 .../CarbonFormatDirectoryStructureTest.java     |   4 +
 .../writer/CarbonDictionaryWriterImplTest.java  |  15 +-
 .../src/main/resources/datawithoutheader.csv    |  10 +
 .../examples/AllDictionaryExample.scala         |  62 ++
 .../examples/util/AllDictionaryUtil.scala       | 108 +++
 .../carbondata/hadoop/CarbonInputFormat.java    |  10 +-
 .../hadoop/test/util/StoreCreator.java          |   5 +-
 .../spark/merger/CarbonCompactionExecutor.java  |   2 +
 .../carbondata/spark/load/CarbonLoadModel.java  |  70 ++
 .../carbondata/spark/load/CarbonLoaderUtil.java | 243 ++++---
 .../spark/load/DeleteLoadFolders.java           |   9 +
 .../spark/merger/NodeBlockRelation.java         |   8 +-
 .../spark/merger/NodeMultiBlockRelation.java    |   8 +-
 .../org/apache/spark/sql/CarbonContext.scala    |  27 +
 .../sql/CarbonDatasourceHadoopRelation.scala    |   4 +-
 .../spark/sql/CarbonDatasourceRelation.scala    |  42 +-
 .../org/apache/spark/sql/CarbonSqlParser.scala  | 241 +++++--
 .../execution/command/carbonTableSchema.scala   |  54 +-
 .../spark/sql/hive/CarbonMetastoreCatalog.scala |  73 +-
 .../spark/sql/hive/CarbonStrategies.scala       |  13 +-
 .../spark/sql/hive/DistributionUtil.scala       | 147 ++++
 .../spark/rdd/CarbonDataLoadRDD.scala           |  15 +-
 .../spark/rdd/CarbonDataRDDFactory.scala        |  35 +-
 .../spark/rdd/CarbonGlobalDictionaryRDD.scala   | 311 ++++++--
 .../carbondata/spark/rdd/CarbonMergerRDD.scala  | 132 +++-
 .../org/carbondata/spark/rdd/Compactor.scala    |  16 +-
 .../spark/tasks/DictionaryWriterTask.scala      | 134 ++++
 .../spark/tasks/SortIndexWriterTask.scala       |  62 ++
 .../org/carbondata/spark/util/CommonUtil.scala  |  38 +
 .../spark/util/GlobalDictionaryUtil.scala       | 521 ++++++++++----
 .../20160423/1400_1405/complex.dictionary       |  20 +
 .../sample/20160423/1400_1405/sample.dictionary |   9 +
 .../spark/src/test/resources/bigIntData.csv     |  14 +
 .../src/test/resources/big_int_Decimal.csv      |   3 +
 .../spark/src/test/resources/channelsId.csv     |  10 +
 .../src/test/resources/complexTypeDecimal.csv   |   9 +
 .../test/resources/complexTypeDecimalNested.csv |   9 +
 .../src/test/resources/complexdatareordered.csv |  10 +
 .../src/test/resources/complextypesample.csv    |  50 ++
 .../src/test/resources/datasamplecomplex.csv    |   2 +
 .../datawithNegeativewithoutHeader.csv          |   4 +
 .../test/resources/datawithNegtiveNumber.csv    |   5 +
 .../test/resources/decimalDataWithHeader.csv    |  18 +-
 .../test/resources/decimalDataWithoutHeader.csv |  18 +-
 .../src/test/resources/deviceInformationId.csv  |  48 ++
 .../src/test/resources/deviceInformationId2.csv |   6 +
 .../src/test/resources/emptyrow/emptyRows.csv   |   7 +
 .../resources/locationInfoActiveCountry.csv     |   9 +
 integration/spark/src/test/resources/mac.csv    |  39 +
 .../spark/src/test/resources/mobileimei.csv     |  24 +
 .../test/resources/nullvalueserialization.csv   |   2 +
 .../test/resources/verticalDelimitedData.csv    |  12 +
 .../apache/spark/sql/TestCarbonSqlParser.scala  |  11 +
 ...plexPrimitiveTimestampDirectDictionary.scala |  60 ++
 .../complexType/TestComplexTypeQuery.scala      | 119 +++
 .../dataload/TestLoadDataWithBlankLine.scala    |   2 +-
 .../TestLoadDataWithMaxMinInteger.scala         |  18 +-
 .../AllDataTypesTestCaseAggregate.scala         |  23 +-
 .../AllDataTypesTestCaseAggregate.scala         |  44 +-
 .../allqueries/TestQueryWithoutDataLoad.scala   |  63 ++
 .../testsuite/bigdecimal/TestAvgForBigInt.scala |  59 ++
 .../testsuite/bigdecimal/TestBigDecimal.scala   |  18 +
 .../TestDimensionWithDecimalDataType.scala      |  61 ++
 .../createtable/TestCreateTableSyntax.scala     |  12 +
 .../datacompaction/DataCompactionLockTest.scala |   2 +-
 .../dataload/TestLoadDataWithHiveSyntax.scala   | 473 ++++++++----
 .../TestNoInvertedIndexLoadAndQuery.scala       |   8 +-
 .../deleteTable/TestDeleteTableNewDDL.scala     |  11 +
 .../detailquery/AllDataTypesTestCase.scala      |   7 +-
 .../ColumnGroupDataTypesTestCase.scala          |  18 +
 .../HighCardinalityDataTypesTestCase.scala      |  82 ++-
 .../NoDictionaryColumnTestCase.scala            |  58 ++
 .../testsuite/emptyrow/TestEmptyRows.scala      |  82 +++
 .../filterexpr/AllDataTypesTestCaseFilter.scala |   7 +-
 .../filterexpr/FilterProcessorTestCase.scala    |  78 ++
 .../HadoopFSRelationTestCase.scala              |   8 +-
 .../joinquery/AllDataTypesTestCaseJoin.scala    |   7 +-
 .../TestNullValueSerialization.scala            | 112 +++
 .../sortexpr/AllDataTypesTestCaseSort.scala     |   7 +-
 .../spark/util/AllDictionaryTestCase.scala      | 140 ++++
 .../AutoHighCardinalityIdentifyTestCase.scala   |  44 +-
 .../spark/util/DictionaryTestCaseUtil.scala     |  52 ++
 .../util/ExternalColumnDictionaryTestCase.scala | 213 ++++++
 ...GlobalDictionaryUtilConcurrentTestCase.scala | 177 +++++
 .../util/GlobalDictionaryUtilTestCase.scala     |  26 +-
 processing/pom.xml                              |   7 +-
 .../carbondata/lcm/locks/CarbonLockFactory.java |  15 +-
 .../org/carbondata/lcm/locks/HdfsFileLock.java  |  28 +-
 .../org/carbondata/lcm/locks/LocalFileLock.java |  50 +-
 .../org/carbondata/lcm/locks/LockUsage.java     |   9 +-
 .../carbondata/lcm/locks/ZooKeeperLocking.java  |  27 +-
 .../lcm/status/SegmentStatusManager.java        |  32 +-
 .../processing/api/dataloader/SchemaInfo.java   |  20 +
 .../processing/csvload/DataGraphExecuter.java   |  35 -
 .../processing/csvload/GraphExecutionUtil.java  |  28 -
 .../csvreaderstep/BlockDataHandler.java         | 718 -------------------
 .../csvreaderstep/CrLfMatcherInterface.java     |  26 -
 .../processing/csvreaderstep/CsvInput.java      | 165 ++---
 .../processing/csvreaderstep/CsvInputData.java  |   5 -
 .../processing/csvreaderstep/CsvInputMeta.java  |  18 +-
 .../processing/csvreaderstep/CustomReader.java  | 157 ++++
 .../csvreaderstep/EmptyPatternMatcher.java      |  28 -
 .../csvreaderstep/MultiByteBigCrLfMatcher.java  |  40 --
 .../MultiByteLittleCrLfMatcher.java             |  40 --
 .../csvreaderstep/MultiBytePatternMatcher.java  |  38 -
 .../csvreaderstep/PatternMatcherInterface.java  |  24 -
 .../csvreaderstep/SingleByteCrLfMatcher.java    |  32 -
 .../csvreaderstep/SingleBytePatternMatcher.java |  28 -
 .../csvreaderstep/UnivocityCsvParser.java       | 182 +++++
 .../csvreaderstep/UnivocityCsvParserVo.java     | 184 +++++
 .../processing/datatypes/PrimitiveDataType.java |   9 +-
 .../graphgenerator/GraphGenerator.java          |  15 +
 .../configuration/GraphConfigurationInfo.java   |  23 +
 .../processing/mdkeygen/MDKeyGenStep.java       |  14 +-
 .../processing/schema/metadata/TableOption.java |  82 +++
 .../schema/metadata/TableOptionWrapper.java     | 106 +++
 .../sortdata/IntermediateFileMerger.java        |   6 +
 .../sortandgroupby/sortdata/SortDataRows.java   |   1 -
 .../sortdatastep/SortKeyStep.java               |   7 +
 .../store/CarbonFactDataHandlerColumnar.java    |   2 +-
 .../CarbonCSVBasedDimSurrogateKeyGen.java       |  39 +
 .../csvbased/CarbonCSVBasedSeqGenMeta.java      |  39 +-
 .../csvbased/CarbonCSVBasedSeqGenStep.java      |  32 +-
 .../FileStoreSurrogateKeyGenForCSV.java         |   8 +
 .../processing/util/CarbonSchemaParser.java     |  16 +-
 .../carbondata/lcm/locks/LocalFileLockTest.java |   7 +-
 .../lcm/locks/ZooKeeperLockingTest.java         |   7 +-
 189 files changed, 7918 insertions(+), 2394 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/common/src/main/java/org/carbondata/common/logging/impl/StandardLogService.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/core/carbon/datastore/block/SegmentProperties.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/core/carbon/datastore/block/SegmentProperties.java
index c1b6b8f,6645da4..df99b4b
--- a/core/src/main/java/org/carbondata/core/carbon/datastore/block/SegmentProperties.java
+++ b/core/src/main/java/org/carbondata/core/carbon/datastore/block/SegmentProperties.java
@@@ -338,11 -361,25 +361,25 @@@ public class SegmentProperties 
          // as complex type will be stored at last so once complex type started all the dimension
          // will be added to complex type
          else if (isComplexDimensionStarted || CarbonUtil.hasDataType(columnSchema.getDataType(),
 -            new DataType[] { DataType.ARRAY, DataType.STRUCT, DataType.MAP })) {
 +            new DataType[] { DataType.ARRAY, DataType.STRUCT })) {
            cardinalityIndexForComplexDimensionColumn.add(tableOrdinal);
-           carbonDimension = new CarbonDimension(columnSchema, ++dimensonOrdinal, -1, -1);
+           carbonDimension =
+               new CarbonDimension(columnSchema, dimensonOrdinal++, -1, -1, complexTypeOrdinal++);
+           carbonDimension.initializeChildDimensionsList(columnSchema.getNumberOfChild());
            complexDimensions.add(carbonDimension);
            isComplexDimensionStarted = true;
+           int previouseOrdinal = dimensonOrdinal;
+           dimensonOrdinal =
+               readAllComplexTypeChildrens(dimensonOrdinal, columnSchema.getNumberOfChild(),
+                   columnsInTable, carbonDimension, complexTypeOrdinal);
+           int numberOfChildrenDimensionAdded = dimensonOrdinal - previouseOrdinal;
+           for (int i = 0; i < numberOfChildrenDimensionAdded; i++) {
+             cardinalityIndexForComplexDimensionColumn.add(++tableOrdinal);
+           }
+           counter = dimensonOrdinal;
+           complexTypeOrdinal = carbonDimension.getListOfChildDimensions()
+               .get(carbonDimension.getListOfChildDimensions().size() - 1).getComplexTypeOrdinal();
+           complexTypeOrdinal++;
            continue;
          } else {
            // for no dictionary dimension
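
The resolution above keeps the richer complex-type handling: a complex dimension now receives its own complexTypeOrdinal, its children are registered via initializeChildDimensionsList, and readAllComplexTypeChildrens walks the children recursively so that dimension ordinals stay consecutive across the whole subtree. A minimal standalone sketch of that depth-first numbering, using a hypothetical Node/assign pair rather than CarbonData APIs:

import java.util.ArrayList;
import java.util.List;

// Hypothetical model of a nested (ARRAY/STRUCT) column; not a CarbonData class.
final class Node {
  final String name;
  final List<Node> children = new ArrayList<>();
  int ordinal = -1;

  Node(String name) {
    this.name = name;
  }

  // Assigns consecutive ordinals parent-first, then children, mirroring how
  // readAllComplexTypeChildrens advances the ordinal once per child visited.
  static int assign(Node node, int next) {
    node.ordinal = next++;
    for (Node child : node.children) {
      next = assign(child, next);
    }
    return next; // first unused ordinal after this subtree
  }
}

The returned value plays the same role as the updated dimensonOrdinal in the merged code: the caller learns how many ordinals the complex subtree consumed and can then add one cardinality index per child, as the loop over numberOfChildrenDimensionAdded does above.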

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/core/carbon/path/CarbonTablePath.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/core/constants/CarbonCommonConstants.java
index ca27dce,8dbe4dd..d56c938
--- a/core/src/main/java/org/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/carbondata/core/constants/CarbonCommonConstants.java
@@@ -825,15 -831,10 +838,20 @@@ public final class CarbonCommonConstant
    public static final String DEFAULT_SEGMENT_LEVEL_THRESHOLD = "4,3";
  
    /**
 +   * default location of the carbon metastore db
 +   */
 +  public static final String METASTORE_LOCATION_DEFAULT_VAL = "../carbon.metastore";
 +
 +  /**
 +   * hive connection url
 +   */
 +  public static final String HIVE_CONNECTION_URL = "javax.jdo.option.ConnectionURL";
 +
++  /**
+    * Record size in case of compaction.
+    */
+   public static final int COMPACTION_INMEMORY_RECORD_SIZE = 120000;
+ 
    private CarbonCommonConstants() {
    }
  }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/core/keygenerator/directdictionary/timestamp/TimeStampDirectDictionaryGenerator.java
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/core/util/DataFileFooterConverter.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/core/util/DataFileFooterConverter.java
index 52a139c,0000000..cb28386
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/core/util/DataFileFooterConverter.java
+++ b/core/src/main/java/org/carbondata/core/util/DataFileFooterConverter.java
@@@ -1,463 -1,0 +1,467 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.carbondata.core.util;
 +
 +import java.io.ByteArrayInputStream;
 +import java.io.IOException;
 +import java.io.ObjectInputStream;
 +import java.nio.ByteBuffer;
 +import java.util.ArrayList;
 +import java.util.BitSet;
 +import java.util.Iterator;
 +import java.util.List;
 +
 +import org.carbondata.common.logging.LogService;
 +import org.carbondata.common.logging.LogServiceFactory;
 +import org.carbondata.core.carbon.datastore.block.TableBlockInfo;
 +import org.carbondata.core.carbon.metadata.blocklet.BlockletInfo;
 +import org.carbondata.core.carbon.metadata.blocklet.DataFileFooter;
 +import org.carbondata.core.carbon.metadata.blocklet.SegmentInfo;
 +import org.carbondata.core.carbon.metadata.blocklet.compressor.ChunkCompressorMeta;
 +import org.carbondata.core.carbon.metadata.blocklet.compressor.CompressionCodec;
 +import org.carbondata.core.carbon.metadata.blocklet.datachunk.DataChunk;
 +import org.carbondata.core.carbon.metadata.blocklet.datachunk.PresenceMeta;
 +import org.carbondata.core.carbon.metadata.blocklet.index.BlockletBTreeIndex;
 +import org.carbondata.core.carbon.metadata.blocklet.index.BlockletIndex;
 +import org.carbondata.core.carbon.metadata.blocklet.index.BlockletMinMaxIndex;
 +import org.carbondata.core.carbon.metadata.blocklet.sort.SortState;
 +import org.carbondata.core.carbon.metadata.datatype.DataType;
 +import org.carbondata.core.carbon.metadata.encoder.Encoding;
 +import org.carbondata.core.carbon.metadata.schema.table.column.ColumnSchema;
 +import org.carbondata.core.datastorage.store.FileHolder;
 +import org.carbondata.core.datastorage.store.impl.FileFactory;
 +import org.carbondata.core.metadata.ValueEncoderMeta;
 +import org.carbondata.core.reader.CarbonFooterReader;
 +import org.carbondata.core.reader.CarbonIndexFileReader;
 +import org.carbondata.format.BlockIndex;
 +import org.carbondata.format.FileFooter;
 +
 +/**
 + * Below class will be used to convert the thrift object of data file
 + * meta data to wrapper object
 + */
 +public class DataFileFooterConverter {
 +
 +  private static final LogService LOGGER =
 +      LogServiceFactory.getLogService(DataFileFooterConverter.class.getName());
 +
 +  /**
 +   * Below method will be used to get the index info from index file
 +   *
 +   * @param filePath           file path of the index file
 +   * @param tableBlockInfoList table block index
 +   * @return list of index info
 +   * @throws IOException problem while reading the index file
 +   */
 +  public List<DataFileFooter> getIndexInfo(String filePath, List<TableBlockInfo> tableBlockInfoList)
 +      throws IOException {
 +    CarbonIndexFileReader indexReader = new CarbonIndexFileReader();
-     // open the reader
-     indexReader.openThriftReader(filePath);
-     // get the index header
-     org.carbondata.format.IndexHeader readIndexHeader = indexReader.readIndexHeader();
-     List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
-     List<org.carbondata.format.ColumnSchema> table_columns = readIndexHeader.getTable_columns();
-     for (int i = 0; i < table_columns.size(); i++) {
-       columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
-     }
-     // get the segment info
-     SegmentInfo segmentInfo = getSegmentInfo(readIndexHeader.getSegment_info());
-     BlockletIndex blockletIndex = null;
-     int counter = 0;
-     DataFileFooter dataFileFooter = null;
 +    List<DataFileFooter> dataFileFooters = new ArrayList<DataFileFooter>();
-     // read the block info from file
-     while (indexReader.hasNext()) {
-       BlockIndex readBlockIndexInfo = indexReader.readBlockIndexInfo();
-       blockletIndex = getBlockletIndex(readBlockIndexInfo.getBlock_index());
-       dataFileFooter = new DataFileFooter();
-       dataFileFooter.setBlockletIndex(blockletIndex);
-       dataFileFooter.setColumnInTable(columnSchemaList);
-       dataFileFooter.setNumberOfRows(readBlockIndexInfo.getNum_rows());
-       dataFileFooter.setTableBlockInfo(tableBlockInfoList.get(counter++));
-       dataFileFooter.setSegmentInfo(segmentInfo);
-       dataFileFooters.add(dataFileFooter);
++    try {
++      // open the reader
++      indexReader.openThriftReader(filePath);
++      // get the index header
++      org.carbondata.format.IndexHeader readIndexHeader = indexReader.readIndexHeader();
++      List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
++      List<org.carbondata.format.ColumnSchema> table_columns = readIndexHeader.getTable_columns();
++      for (int i = 0; i < table_columns.size(); i++) {
++        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
++      }
++      // get the segment info
++      SegmentInfo segmentInfo = getSegmentInfo(readIndexHeader.getSegment_info());
++      BlockletIndex blockletIndex = null;
++      int counter = 0;
++      DataFileFooter dataFileFooter = null;
++      // read the block info from file
++      while (indexReader.hasNext()) {
++        BlockIndex readBlockIndexInfo = indexReader.readBlockIndexInfo();
++        blockletIndex = getBlockletIndex(readBlockIndexInfo.getBlock_index());
++        dataFileFooter = new DataFileFooter();
++        dataFileFooter.setBlockletIndex(blockletIndex);
++        dataFileFooter.setColumnInTable(columnSchemaList);
++        dataFileFooter.setNumberOfRows(readBlockIndexInfo.getNum_rows());
++        dataFileFooter.setTableBlockInfo(tableBlockInfoList.get(counter++));
++        dataFileFooter.setSegmentInfo(segmentInfo);
++        dataFileFooters.add(dataFileFooter);
++      }
++    } finally {
++      indexReader.closeThriftReader();
 +    }
 +    return dataFileFooters;
 +  }
 +
 +  /**
 +   * Below method will be used to convert thrift file meta to wrapper file meta
 +   */
 +  public DataFileFooter readDataFileFooter(String filePath, long blockOffset, long blockLength)
 +      throws IOException {
 +    DataFileFooter dataFileFooter = new DataFileFooter();
 +    FileHolder fileReader = null;
 +    try {
 +      long completeBlockLength = blockOffset + blockLength;
 +      long footerPointer = completeBlockLength - 8;
 +      fileReader = FileFactory.getFileHolder(FileFactory.getFileType(filePath));
 +      long actualFooterOffset = fileReader.readLong(filePath, footerPointer);
 +      CarbonFooterReader reader = new CarbonFooterReader(filePath, actualFooterOffset);
 +      FileFooter footer = reader.readFooter();
 +      dataFileFooter.setVersionId(footer.getVersion());
 +      dataFileFooter.setNumberOfRows(footer.getNum_rows());
 +      dataFileFooter.setSegmentInfo(getSegmentInfo(footer.getSegment_info()));
 +      List<ColumnSchema> columnSchemaList = new ArrayList<ColumnSchema>();
 +      List<org.carbondata.format.ColumnSchema> table_columns = footer.getTable_columns();
 +      for (int i = 0; i < table_columns.size(); i++) {
 +        columnSchemaList.add(thriftColumnSchmeaToWrapperColumnSchema(table_columns.get(i)));
 +      }
 +      dataFileFooter.setColumnInTable(columnSchemaList);
 +
 +      List<org.carbondata.format.BlockletIndex> leaf_node_indices_Thrift =
 +          footer.getBlocklet_index_list();
 +      List<BlockletIndex> blockletIndexList = new ArrayList<BlockletIndex>();
 +      for (int i = 0; i < leaf_node_indices_Thrift.size(); i++) {
 +        BlockletIndex blockletIndex = getBlockletIndex(leaf_node_indices_Thrift.get(i));
 +        blockletIndexList.add(blockletIndex);
 +      }
 +
 +      List<org.carbondata.format.BlockletInfo> leaf_node_infos_Thrift =
 +          footer.getBlocklet_info_list();
 +      List<BlockletInfo> blockletInfoList = new ArrayList<BlockletInfo>();
 +      for (int i = 0; i < leaf_node_infos_Thrift.size(); i++) {
 +        BlockletInfo blockletInfo = getBlockletInfo(leaf_node_infos_Thrift.get(i));
 +        blockletInfo.setBlockletIndex(blockletIndexList.get(i));
 +        blockletInfoList.add(blockletInfo);
 +      }
 +      dataFileFooter.setBlockletList(blockletInfoList);
 +      dataFileFooter.setBlockletIndex(getBlockletIndexForDataFileFooter(blockletIndexList));
 +    } finally {
 +      if (null != fileReader) {
 +        fileReader.finish();
 +      }
 +    }
 +    return dataFileFooter;
 +  }
 +
 +  /**
 +   * Below method will be used to get blocklet index for data file meta
 +   *
 +   * @param blockletIndexList
 +   * @return blocklet index
 +   */
 +  private BlockletIndex getBlockletIndexForDataFileFooter(List<BlockletIndex> blockletIndexList) {
 +    BlockletIndex blockletIndex = new BlockletIndex();
 +    BlockletBTreeIndex blockletBTreeIndex = new BlockletBTreeIndex();
 +    blockletBTreeIndex.setStartKey(blockletIndexList.get(0).getBtreeIndex().getStartKey());
 +    blockletBTreeIndex
 +        .setEndKey(blockletIndexList.get(blockletIndexList.size() - 1).getBtreeIndex().getEndKey());
 +    blockletIndex.setBtreeIndex(blockletBTreeIndex);
 +    byte[][] currentMinValue = blockletIndexList.get(0).getMinMaxIndex().getMinValues().clone();
 +    byte[][] currentMaxValue = blockletIndexList.get(0).getMinMaxIndex().getMaxValues().clone();
 +    byte[][] minValue = null;
 +    byte[][] maxValue = null;
 +    for (int i = 1; i < blockletIndexList.size(); i++) {
 +      minValue = blockletIndexList.get(i).getMinMaxIndex().getMinValues();
 +      maxValue = blockletIndexList.get(i).getMinMaxIndex().getMaxValues();
 +      for (int j = 0; j < maxValue.length; j++) {
 +        if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(currentMinValue[j], minValue[j]) > 0) {
 +          currentMinValue[j] = minValue[j].clone();
 +        }
 +        if (ByteUtil.UnsafeComparer.INSTANCE.compareTo(currentMaxValue[j], maxValue[j]) < 0) {
 +          currentMaxValue[j] = maxValue[j].clone();
 +        }
 +      }
 +    }
 +
 +    BlockletMinMaxIndex minMax = new BlockletMinMaxIndex();
 +    minMax.setMaxValues(currentMaxValue);
 +    minMax.setMinValues(currentMinValue);
 +    blockletIndex.setMinMaxIndex(minMax);
 +    return blockletIndex;
 +  }
 +
 +  private ColumnSchema thriftColumnSchmeaToWrapperColumnSchema(
 +      org.carbondata.format.ColumnSchema externalColumnSchema) {
 +    ColumnSchema wrapperColumnSchema = new ColumnSchema();
 +    wrapperColumnSchema.setColumnUniqueId(externalColumnSchema.getColumn_id());
 +    wrapperColumnSchema.setColumnName(externalColumnSchema.getColumn_name());
 +    wrapperColumnSchema.setColumnar(externalColumnSchema.isColumnar());
 +    wrapperColumnSchema
 +        .setDataType(thriftDataTyopeToWrapperDataType(externalColumnSchema.data_type));
 +    wrapperColumnSchema.setDimensionColumn(externalColumnSchema.isDimension());
 +    List<Encoding> encoders = new ArrayList<Encoding>();
 +    for (org.carbondata.format.Encoding encoder : externalColumnSchema.getEncoders()) {
 +      encoders.add(fromExternalToWrapperEncoding(encoder));
 +    }
 +    wrapperColumnSchema.setEncodingList(encoders);
 +    wrapperColumnSchema.setNumberOfChild(externalColumnSchema.getNum_child());
 +    wrapperColumnSchema.setPrecision(externalColumnSchema.getPrecision());
 +    wrapperColumnSchema.setColumnGroup(externalColumnSchema.getColumn_group_id());
 +    wrapperColumnSchema.setScale(externalColumnSchema.getScale());
 +    wrapperColumnSchema.setDefaultValue(externalColumnSchema.getDefault_value());
 +    wrapperColumnSchema.setAggregateFunction(externalColumnSchema.getAggregate_function());
 +    return wrapperColumnSchema;
 +  }
 +
 +  /**
 +   * Below method is to convert the blocklet info of the thrift to wrapper
 +   * blocklet info
 +   *
 +   * @param blockletInfoThrift blocklet info of the thrift
 +   * @return blocklet info wrapper
 +   */
 +  private BlockletInfo getBlockletInfo(org.carbondata.format.BlockletInfo blockletInfoThrift) {
 +    BlockletInfo blockletInfo = new BlockletInfo();
 +    List<DataChunk> dimensionColumnChunk = new ArrayList<DataChunk>();
 +    List<DataChunk> measureChunk = new ArrayList<DataChunk>();
 +    Iterator<org.carbondata.format.DataChunk> column_data_chunksIterator =
 +        blockletInfoThrift.getColumn_data_chunksIterator();
 +    if (null != column_data_chunksIterator) {
 +      while (column_data_chunksIterator.hasNext()) {
 +        org.carbondata.format.DataChunk next = column_data_chunksIterator.next();
 +        if (next.isRowMajor()) {
 +          dimensionColumnChunk.add(getDataChunk(next, false));
 +        } else if (next.getEncoders().contains(org.carbondata.format.Encoding.DELTA)) {
 +          measureChunk.add(getDataChunk(next, true));
 +        } else {
 +          dimensionColumnChunk.add(getDataChunk(next, false));
 +        }
 +      }
 +    }
 +    blockletInfo.setDimensionColumnChunk(dimensionColumnChunk);
 +    blockletInfo.setMeasureColumnChunk(measureChunk);
 +    blockletInfo.setNumberOfRows(blockletInfoThrift.getNum_rows());
 +    return blockletInfo;
 +  }
 +
 +  /**
 +   * Below method is to convert the thrift encoding to wrapper encoding
 +   *
 +   * @param encoderThrift thrift encoding
 +   * @return wrapper encoding
 +   */
 +  private Encoding fromExternalToWrapperEncoding(org.carbondata.format.Encoding encoderThrift) {
 +    switch (encoderThrift) {
 +      case DICTIONARY:
 +        return Encoding.DICTIONARY;
 +      case DELTA:
 +        return Encoding.DELTA;
 +      case RLE:
 +        return Encoding.RLE;
 +      case INVERTED_INDEX:
 +        return Encoding.INVERTED_INDEX;
 +      case BIT_PACKED:
 +        return Encoding.BIT_PACKED;
 +      case DIRECT_DICTIONARY:
 +        return Encoding.DIRECT_DICTIONARY;
 +      default:
 +        return Encoding.DICTIONARY;
 +    }
 +  }
 +
 +  /**
 +   * Below method will be used to convert the thrift compression to wrapper
 +   * compression codec
 +   *
 +   * @param compressionCodecThrift
 +   * @return wrapper compression codec
 +   */
 +  private CompressionCodec getCompressionCodec(
 +      org.carbondata.format.CompressionCodec compressionCodecThrift) {
 +    switch (compressionCodecThrift) {
 +      case SNAPPY:
 +        return CompressionCodec.SNAPPY;
 +      default:
 +        return CompressionCodec.SNAPPY;
 +    }
 +  }
 +
 +  /**
 +   * Below method will be used to convert thrift segment object to wrapper
 +   * segment object
 +   *
 +   * @param segmentInfo thrift segment info object
 +   * @return wrapper segment info object
 +   */
 +  private SegmentInfo getSegmentInfo(org.carbondata.format.SegmentInfo segmentInfo) {
 +    SegmentInfo info = new SegmentInfo();
 +    int[] cardinality = new int[segmentInfo.getColumn_cardinalities().size()];
 +    for (int i = 0; i < cardinality.length; i++) {
 +      cardinality[i] = segmentInfo.getColumn_cardinalities().get(i);
 +    }
 +    info.setColumnCardinality(cardinality);
 +    info.setNumberOfColumns(segmentInfo.getNum_cols());
 +    return info;
 +  }
 +
 +  /**
 +   * Below method will be used to convert the blocklet index of thrift to
 +   * wrapper
 +   *
 +   * @param blockletIndexThrift
 +   * @return blocklet index wrapper
 +   */
 +  private BlockletIndex getBlockletIndex(org.carbondata.format.BlockletIndex blockletIndexThrift) {
 +    org.carbondata.format.BlockletBTreeIndex btreeIndex = blockletIndexThrift.getB_tree_index();
 +    org.carbondata.format.BlockletMinMaxIndex minMaxIndex = blockletIndexThrift.getMin_max_index();
 +    return new BlockletIndex(
 +        new BlockletBTreeIndex(btreeIndex.getStart_key(), btreeIndex.getEnd_key()),
 +        new BlockletMinMaxIndex(minMaxIndex.getMin_values(), minMaxIndex.getMax_values()));
 +  }
 +
 +  /**
 +   * Below method will be used to convert the thrift compression meta to
 +   * wrapper chunk compression meta
 +   *
 +   * @param chunkCompressionMetaThrift
 +   * @return chunkCompressionMetaWrapper
 +   */
 +  private ChunkCompressorMeta getChunkCompressionMeta(
 +      org.carbondata.format.ChunkCompressionMeta chunkCompressionMetaThrift) {
 +    ChunkCompressorMeta compressorMeta = new ChunkCompressorMeta();
 +    compressorMeta
 +        .setCompressor(getCompressionCodec(chunkCompressionMetaThrift.getCompression_codec()));
 +    compressorMeta.setCompressedSize(chunkCompressionMetaThrift.getTotal_compressed_size());
 +    compressorMeta.setUncompressedSize(chunkCompressionMetaThrift.getTotal_uncompressed_size());
 +    return compressorMeta;
 +  }
 +
 +  /**
 +   * Below method will be used to convert the thrift data type to wrapper data
 +   * type
 +   *
 +   * @param dataTypeThrift
 +   * @return dataType wrapper
 +   */
 +  private DataType thriftDataTyopeToWrapperDataType(org.carbondata.format.DataType dataTypeThrift) {
 +    switch (dataTypeThrift) {
 +      case STRING:
 +        return DataType.STRING;
 +      case SHORT:
 +        return DataType.SHORT;
 +      case INT:
 +        return DataType.INT;
 +      case LONG:
 +        return DataType.LONG;
 +      case DOUBLE:
 +        return DataType.DOUBLE;
 +      case DECIMAL:
 +        return DataType.DECIMAL;
 +      case TIMESTAMP:
 +        return DataType.TIMESTAMP;
 +      case ARRAY:
 +        return DataType.ARRAY;
 +      case STRUCT:
 +        return DataType.STRUCT;
 +      default:
 +        return DataType.STRING;
 +    }
 +  }
 +
 +  /**
 +   * Below method will be used to convert the thrift presence meta to wrapper
 +   * presence meta
 +   *
 +   * @param presentMetadataThrift
 +   * @return wrapper presence meta
 +   */
 +  private PresenceMeta getPresenceMeta(org.carbondata.format.PresenceMeta presentMetadataThrift) {
 +    PresenceMeta presenceMeta = new PresenceMeta();
 +    presenceMeta.setRepresentNullValues(presentMetadataThrift.isRepresents_presence());
 +    presenceMeta.setBitSet(BitSet.valueOf(presentMetadataThrift.getPresent_bit_stream()));
 +    return presenceMeta;
 +  }
 +
 +  /**
 +   * Below method will be used to convert the thrift object to wrapper object
 +   *
 +   * @param sortStateThrift
 +   * @return wrapper sort state object
 +   */
 +  private SortState getSortState(org.carbondata.format.SortState sortStateThrift) {
 +    if (sortStateThrift == org.carbondata.format.SortState.SORT_EXPLICIT) {
 +      return SortState.SORT_EXPLICT;
 +    } else if (sortStateThrift == org.carbondata.format.SortState.SORT_NATIVE) {
 +      return SortState.SORT_NATIVE;
 +    } else {
 +      return SortState.SORT_NONE;
 +    }
 +  }
 +
 +  /**
 +   * Below method will be used to convert the thrift data chunk to wrapper
 +   * data chunk
 +   *
 +   * @param datachunkThrift
 +   * @return wrapper data chunk
 +   */
 +  private DataChunk getDataChunk(org.carbondata.format.DataChunk datachunkThrift,
 +      boolean isPresenceMetaPresent) {
 +    DataChunk dataChunk = new DataChunk();
 +    dataChunk.setColumnUniqueIdList(datachunkThrift.getColumn_ids());
 +    dataChunk.setDataPageLength(datachunkThrift.getData_page_length());
 +    dataChunk.setDataPageOffset(datachunkThrift.getData_page_offset());
 +    if (isPresenceMetaPresent) {
 +      dataChunk.setNullValueIndexForColumn(getPresenceMeta(datachunkThrift.getPresence()));
 +    }
 +    dataChunk.setRlePageLength(datachunkThrift.getRle_page_length());
 +    dataChunk.setRlePageOffset(datachunkThrift.getRle_page_offset());
 +    dataChunk.setRowMajor(datachunkThrift.isRowMajor());
 +    dataChunk.setRowIdPageLength(datachunkThrift.getRowid_page_length());
 +    dataChunk.setRowIdPageOffset(datachunkThrift.getRowid_page_offset());
 +    dataChunk.setSortState(getSortState(datachunkThrift.getSort_state()));
 +    dataChunk.setChunkCompressionMeta(getChunkCompressionMeta(datachunkThrift.getChunk_meta()));
 +    List<Encoding> encodingList = new ArrayList<Encoding>(datachunkThrift.getEncoders().size());
 +    for (int i = 0; i < datachunkThrift.getEncoders().size(); i++) {
 +      encodingList.add(fromExternalToWrapperEncoding(datachunkThrift.getEncoders().get(i)));
 +    }
 +    dataChunk.setEncoderList(encodingList);
 +    if (encodingList.contains(Encoding.DELTA)) {
 +      List<ByteBuffer> thriftEncoderMeta = datachunkThrift.getEncoder_meta();
 +      List<ValueEncoderMeta> encodeMetaList =
 +          new ArrayList<ValueEncoderMeta>(thriftEncoderMeta.size());
 +      for (int i = 0; i < thriftEncoderMeta.size(); i++) {
 +        encodeMetaList.add(deserializeEncoderMeta(thriftEncoderMeta.get(i).array()));
 +      }
 +      dataChunk.setValueEncoderMeta(encodeMetaList);
 +    }
 +    return dataChunk;
 +  }
 +
 +  /**
 +   * Below method will be used to convert the encode metadata to
 +   * ValueEncoderMeta object
 +   *
 +   * @param encoderMeta
 +   * @return ValueEncoderMeta object
 +   */
 +  private ValueEncoderMeta deserializeEncoderMeta(byte[] encoderMeta) {
 +    // TODO : should remove the unnecessary fields.
 +    ByteArrayInputStream aos = null;
 +    ObjectInputStream objStream = null;
 +    ValueEncoderMeta meta = null;
 +    try {
 +      aos = new ByteArrayInputStream(encoderMeta);
 +      objStream = new ObjectInputStream(aos);
 +      meta = (ValueEncoderMeta) objStream.readObject();
 +    } catch (ClassNotFoundException e) {
 +      LOGGER.error(e);
 +    } catch (IOException e) {
 +      CarbonUtil.closeStreams(objStream);
 +    }
 +    return meta;
 +  }
 +}
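
The substantive change merged into getIndexInfo above is that all of the thrift reader work now sits inside a try block, with indexReader.closeThriftReader() in finally, so the reader is released even when reading the header or a block index throws. A hedged sketch of the same pattern in isolation, with a generic Closeable standing in for the CarbonData reader:

import java.io.Closeable;
import java.io.IOException;

// Generic form of the open/try/finally pattern used in getIndexInfo:
// open the resource, do all parsing inside try, guarantee cleanup in finally.
final class ReaderPattern {

  interface IOConsumer<T> {
    void accept(T t) throws IOException;
  }

  static <T extends Closeable> void withResource(T resource, IOConsumer<T> body)
      throws IOException {
    try {
      body.accept(resource); // e.g. read index header, iterate block indexes
    } finally {
      resource.close();      // runs on both the success and failure paths
    }
  }
}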

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
index 6e2308a,e6d6422..b719607
--- a/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
+++ b/core/src/main/java/org/carbondata/core/util/DataTypeUtil.java
@@@ -275,9 -186,44 +278,46 @@@ public final class DataTypeUtil 
            return data;
        }
      } catch (NumberFormatException ex) {
 +      LOGGER.error("Problem while converting data type: " + data);
        return null;
      }
 +
    }
+ 
+   /**
+    * This method will parse a given string value corresponding to its datatype
+    *
+    * @param value    value to parse
+    * @param dataType datatype for that value
+    * @return true if the value can be parsed as the given datatype, false otherwise
+    */
+   public static boolean validateColumnValueForItsDataType(String value, DataType dataType) {
+     try {
+       Object parsedValue = null;
+       // validation is skipped for the timestamp datatype because a direct dictionary
+       // is generated for it; no dictionary file is created for timestamp columns
+       switch (dataType) {
+         case DECIMAL:
+           parsedValue = new BigDecimal(value);
+           break;
+         case INT:
+           parsedValue = Integer.parseInt(value);
+           break;
+         case LONG:
+           parsedValue = Long.valueOf(value);
+           break;
+         case DOUBLE:
+           parsedValue = Double.valueOf(value);
+           break;
+         default:
+           return true;
+       }
+       if (null != parsedValue) {
+         return true;
+       }
+       return false;
+     } catch (Exception e) {
+       return false;
+     }
+   }
  }
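
The added validateColumnValueForItsDataType helper simply attempts a parse for the numeric types and reports success or failure; non-numeric types fall through to true, and timestamp columns are never checked here because they use direct dictionaries. A hedged usage sketch against the signature above (the call sites are assumed for illustration, not taken from this commit):

// Hypothetical call site: accept or reject a raw string for a column's
// declared datatype before adding it to that column's dictionary.
String raw = "12.5";
if (DataTypeUtil.validateColumnValueForItsDataType(raw, DataType.DOUBLE)) {
  // "12.5" parses as a double, so it is safe to keep
}
if (!DataTypeUtil.validateColumnValueForItsDataType("abc", DataType.INT)) {
  // Integer.parseInt("abc") throws inside the helper, which returns false
}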

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/executor/impl/AbstractQueryExecutor.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/executor/impl/AbstractQueryExecutor.java
index eb2261d,0000000..dd23119
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/executor/impl/AbstractQueryExecutor.java
+++ b/core/src/main/java/org/carbondata/scan/executor/impl/AbstractQueryExecutor.java
@@@ -1,403 -1,0 +1,441 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.carbondata.scan.executor.impl;
 +
 +import java.util.ArrayList;
 +import java.util.Arrays;
 +import java.util.HashSet;
 +import java.util.List;
 +import java.util.Set;
 +
 +import org.carbondata.common.logging.LogService;
 +import org.carbondata.common.logging.LogServiceFactory;
 +import org.carbondata.common.logging.impl.StandardLogService;
 +import org.carbondata.core.carbon.datastore.BlockIndexStore;
 +import org.carbondata.core.carbon.datastore.IndexKey;
 +import org.carbondata.core.carbon.datastore.block.AbstractIndex;
 +import org.carbondata.core.carbon.datastore.block.SegmentProperties;
 +import org.carbondata.core.carbon.datastore.exception.IndexBuilderException;
 +import org.carbondata.core.carbon.metadata.datatype.DataType;
 +import org.carbondata.core.carbon.metadata.encoder.Encoding;
 +import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension;
 +import org.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure;
++import org.carbondata.core.carbon.querystatistics.QueryStatistic;
++import org.carbondata.core.carbon.querystatistics.QueryStatisticsRecorder;
 +import org.carbondata.core.constants.CarbonCommonConstants;
 +import org.carbondata.core.datastorage.store.impl.FileFactory;
 +import org.carbondata.core.keygenerator.KeyGenException;
 +import org.carbondata.core.keygenerator.KeyGenerator;
 +import org.carbondata.core.util.CarbonUtil;
 +import org.carbondata.scan.executor.QueryExecutor;
 +import org.carbondata.scan.executor.exception.QueryExecutionException;
 +import org.carbondata.scan.executor.infos.AggregatorInfo;
 +import org.carbondata.scan.executor.infos.BlockExecutionInfo;
 +import org.carbondata.scan.executor.infos.KeyStructureInfo;
 +import org.carbondata.scan.executor.infos.SortInfo;
 +import org.carbondata.scan.executor.util.QueryUtil;
 +import org.carbondata.scan.executor.util.RestructureUtil;
 +import org.carbondata.scan.filter.FilterUtil;
 +import org.carbondata.scan.model.QueryDimension;
 +import org.carbondata.scan.model.QueryMeasure;
 +import org.carbondata.scan.model.QueryModel;
 +
 +import org.apache.commons.lang3.ArrayUtils;
 +
 +/**
 + * This class provides a skeletal implementation of the {@link QueryExecutor}
 + * interface to minimize the effort required to implement this interface. This
 + * will be used to prepare all the properties required for query execution
 + */
 +public abstract class AbstractQueryExecutor<E> implements QueryExecutor<E> {
 +
 +  private static final LogService LOGGER =
 +      LogServiceFactory.getLogService(AbstractQueryExecutor.class.getName());
 +  /**
 +   * holder for query properties which will be used to execute the query
 +   */
 +  protected QueryExecutorProperties queryProperties;
 +
 +  public AbstractQueryExecutor() {
 +    queryProperties = new QueryExecutorProperties();
 +  }
 +
 +  /**
 +   * Below method will be used to fill the executor properties based on the
 +   * query model: it will parse the query model, get the details and fill
 +   * them into the query properties
 +   *
 +   * @param queryModel
 +   */
 +  protected void initQuery(QueryModel queryModel) throws QueryExecutionException {
 +    StandardLogService.setThreadName(StandardLogService.getPartitionID(
 +        queryModel.getAbsoluteTableIdentifier().getCarbonTableIdentifier().getTableName()),
 +        queryModel.getQueryId());
 +    LOGGER.info("Query will be executed on table: " + queryModel.getAbsoluteTableIdentifier()
 +        .getCarbonTableIdentifier().getTableName());
- 
++    // Initializing statistics list to record the query statistics
++    // creating copy on write to handle concurrent scenario
++    queryProperties.queryStatisticsRecorder = new QueryStatisticsRecorder(queryModel.getQueryId());
++    queryModel.setStatisticsRecorder(queryProperties.queryStatisticsRecorder);
 +    QueryUtil.resolveQueryModel(queryModel);
- 
++    QueryStatistic queryStatistic = new QueryStatistic();
 +    // get the table blocks
 +    try {
 +      queryProperties.dataBlocks = BlockIndexStore.getInstance()
 +          .loadAndGetBlocks(queryModel.getTableBlockInfos(),
 +              queryModel.getAbsoluteTableIdentifier());
 +    } catch (IndexBuilderException e) {
 +      throw new QueryExecutionException(e);
 +    }
++    queryStatistic
++        .addStatistics("Time taken to load the Block(s) In Executor", System.currentTimeMillis());
++    queryProperties.queryStatisticsRecorder.recordStatistics(queryStatistic);
 +    // updating the restructuring infos for the query
 +    queryProperties.keyStructureInfo = getKeyStructureInfo(queryModel,
 +        queryProperties.dataBlocks.get(queryProperties.dataBlocks.size() - 1).getSegmentProperties()
 +            .getDimensionKeyGenerator());
 +
 +    // calculating the total number of aggregated columns
 +    int aggTypeCount = queryModel.getQueryMeasures().size();
 +
 +    int currentIndex = 0;
 +    String[] aggTypes = new String[aggTypeCount];
 +    DataType[] dataTypes = new DataType[aggTypeCount];
 +
 +    for (QueryMeasure carbonMeasure : queryModel.getQueryMeasures()) {
 +      // adding the data type and aggregation type of all the measures;
 +      // this can be used to select the aggregator
 +      aggTypes[currentIndex] = carbonMeasure.getAggregateFunction();
 +      dataTypes[currentIndex] = carbonMeasure.getMeasure().getDataType();
 +      currentIndex++;
 +    }
 +    queryProperties.measureDataTypes = dataTypes;
 +    // as aggregation will be executed in following order
 +    // 1.aggregate dimension expression
 +    // 2. expression
 +    // 3. query measure
 +    // so calculating the index of the expression start index
 +    // and measure column start index
 +    queryProperties.aggExpressionStartIndex = queryModel.getQueryMeasures().size();
 +    queryProperties.measureStartIndex = aggTypes.length - queryModel.getQueryMeasures().size();
 +
++    queryProperties.complexFilterDimension =
++        QueryUtil.getAllFilterDimensions(queryModel.getFilterExpressionResolverTree());
++    queryStatistic = new QueryStatistic();
 +    // mapping from each dictionary column's unique column id to its dictionary,
 +    // which will be used to get the actual column data
 +    queryProperties.columnToDictionayMapping = QueryUtil
 +        .getDimensionDictionaryDetail(queryModel.getQueryDimension(),
-             queryModel.getAbsoluteTableIdentifier());
++            queryProperties.complexFilterDimension, queryModel.getAbsoluteTableIdentifier());
++    queryStatistic
++        .addStatistics("Time taken to load the Dictionary In Executor", System.currentTimeMillis());
++    queryProperties.queryStatisticsRecorder.recordStatistics(queryStatistic);
 +    queryModel.setColumnToDictionaryMapping(queryProperties.columnToDictionayMapping);
 +    // setting the sort dimension indexes; they will be updated while getting the sort info,
 +    // so default them to 0, meaning sort is not present in any dimension
 +    queryProperties.sortDimIndexes = new byte[queryModel.getQueryDimension().size()];
 +  }
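
The recorder wired in above is created per query id and, per the comment, is copy-on-write so that concurrent executor threads can record phase timings safely. A minimal sketch of that pattern, assuming only the JDK (PhaseRecorder and its methods are illustrative stand-ins, not CarbonData's QueryStatisticsRecorder API):

    import java.util.List;
    import java.util.concurrent.CopyOnWriteArrayList;

    // hypothetical stand-in for a per-query statistics recorder
    public class PhaseRecorder {
      private final String queryId;
      // copy-on-write: each add copies the backing array, so readers never block writers
      private final List<String> statistics = new CopyOnWriteArrayList<String>();

      public PhaseRecorder(String queryId) {
        this.queryId = queryId;
      }

      // record a phase along with the wall-clock time at which it completed
      public void recordStatistics(String phase, long timeMillis) {
        statistics.add(phase + " @ " + timeMillis);
      }

      public void logStatistics() {
        for (String entry : statistics) {
          System.out.println(queryId + ": " + entry);
        }
      }

      public static void main(String[] args) {
        PhaseRecorder recorder = new PhaseRecorder("query-1");
        recorder.recordStatistics("Time taken to load the Block(s) In Executor",
            System.currentTimeMillis());
        recorder.logStatistics();
      }
    }
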
 +
 +  /**
 +   * Below method will be used to get the key structure info for the query
 +   *
 +   * @param queryModel   query model
 +   * @param keyGenerator
 +   * @return key structure info
 +   */
 +  private KeyStructureInfo getKeyStructureInfo(QueryModel queryModel, KeyGenerator keyGenerator) {
 +    // getting the masked byte range for dictionary column
 +    int[] maskByteRanges =
 +        QueryUtil.getMaskedByteRange(queryModel.getQueryDimension(), keyGenerator);
 +
 +    // getting the masked bytes for query dimension dictionary column
 +    int[] maskedBytes = QueryUtil.getMaskedByte(keyGenerator.getKeySizeInBytes(), maskByteRanges);
 +
 +    // max key for the dictionary dimension present in the query
 +    byte[] maxKey = null;
 +    try {
 +      // getting the max key, which will be used for masking to get the
 +      // masked key
 +      maxKey = QueryUtil.getMaxKeyBasedOnDimensions(queryModel.getQueryDimension(), keyGenerator);
 +    } catch (KeyGenException e) {
 +      LOGGER.error(e, "problem while getting the max key");
 +    }
 +
 +    KeyStructureInfo restructureInfos = new KeyStructureInfo();
 +    restructureInfos.setKeyGenerator(keyGenerator);
 +    restructureInfos.setMaskByteRanges(maskByteRanges);
 +    restructureInfos.setMaskedBytes(maskedBytes);
 +    restructureInfos.setMaxKey(maxKey);
 +    return restructureInfos;
 +  }
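
The masking machinery above lets the executor compare only the queried dimensions inside a packed multi-dimensional key: the max key has all bits set for the queried dimensions, and the mask byte ranges identify which bytes of the full key carry those bits. A toy sketch of applying such a mask (a simplification under those assumptions, not the real QueryUtil implementation):

    import java.util.Arrays;

    public class MaskDemo {
      // keep only the key bytes/bits that belong to the queried dimensions
      static byte[] applyMask(byte[] fullKey, byte[] maxKey, int[] maskByteRanges) {
        byte[] masked = new byte[maskByteRanges.length];
        for (int i = 0; i < maskByteRanges.length; i++) {
          int pos = maskByteRanges[i];
          masked[i] = (byte) (fullKey[pos] & maxKey[pos]);
        }
        return masked;
      }

      public static void main(String[] args) {
        byte[] fullKey = { 0x1A, 0x2B, 0x3C };               // packed key covering 3 dimensions
        byte[] maxKey = { (byte) 0xFF, 0x00, (byte) 0xFF };  // query touches bytes 0 and 2 only
        int[] ranges = { 0, 2 };
        // prints [26, 60]: bytes 0x1A and 0x3C survive, the middle byte is dropped
        System.out.println(Arrays.toString(applyMask(fullKey, maxKey, ranges)));
      }
    }
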
 +
 +  protected List<BlockExecutionInfo> getBlockExecutionInfos(QueryModel queryModel)
 +      throws QueryExecutionException {
 +    initQuery(queryModel);
 +    List<BlockExecutionInfo> blockExecutionInfoList = new ArrayList<BlockExecutionInfo>();
 +    // fill the block execution info for every block selected in the
 +    // query; the query will be executed based on these infos
 +    for (int i = 0; i < queryProperties.dataBlocks.size(); i++) {
 +      blockExecutionInfoList
 +          .add(getBlockExecutionInfoForBlock(queryModel, queryProperties.dataBlocks.get(i)));
 +    }
++    queryProperties.complexDimensionInfoMap =
++        blockExecutionInfoList.get(blockExecutionInfoList.size() - 1).getComlexDimensionInfoMap();
 +    return blockExecutionInfoList;
 +  }
 +
 +  /**
 +   * Below method will be used to get the block execution info which is
 +   * required to execute any block  based on query model
 +   *
 +   * @param queryModel query model from user query
 +   * @param blockIndex block index
 +   * @return block execution info
 +   * @throws QueryExecutionException any failure during block info creation
 +   */
 +  protected BlockExecutionInfo getBlockExecutionInfoForBlock(QueryModel queryModel,
 +      AbstractIndex blockIndex) throws QueryExecutionException {
 +    BlockExecutionInfo blockExecutionInfo = new BlockExecutionInfo();
 +    SegmentProperties segmentProperties = blockIndex.getSegmentProperties();
 +    List<CarbonDimension> tableBlockDimensions = segmentProperties.getDimensions();
 +    KeyGenerator blockKeyGenerator = segmentProperties.getDimensionKeyGenerator();
 +
 +    // below is to get only those query dimensions that are present in the
 +    // table block
 +    List<QueryDimension> updatedQueryDimension = RestructureUtil
-         .getUpdatedQueryDimension(queryModel.getQueryDimension(), tableBlockDimensions);
++        .getUpdatedQueryDimension(queryModel.getQueryDimension(), tableBlockDimensions,
++            segmentProperties.getComplexDimensions());
 +    // TODO add complex dimension children
 +    int[] maskByteRangesForBlock =
 +        QueryUtil.getMaskedByteRange(updatedQueryDimension, blockKeyGenerator);
 +    int[] maskedByte =
 +        QueryUtil.getMaskedByte(blockKeyGenerator.getKeySizeInBytes(), maskByteRangesForBlock);
 +    blockExecutionInfo.setDimensionsExistInQuery(updatedQueryDimension.size() > 0);
 +    blockExecutionInfo.setDataBlock(blockIndex);
 +    blockExecutionInfo.setBlockKeyGenerator(blockKeyGenerator);
 +    // adding aggregation info for query
 +    blockExecutionInfo.setAggregatorInfo(getAggregatorInfoForBlock(queryModel, blockIndex));
- 
++    // adding query statistics list to record the statistics
++    blockExecutionInfo.setStatisticsRecorder(queryProperties.queryStatisticsRecorder);
 +    // setting the limit
 +    blockExecutionInfo.setLimit(queryModel.getLimit());
 +    // setting whether detail query or not
 +    blockExecutionInfo.setDetailQuery(queryModel.isDetailQuery());
 +    // setting whether raw record query or not
 +    blockExecutionInfo.setRawRecordDetailQuery(queryModel.isForcedDetailRawQuery());
 +    // setting the masked bytes of the block, which will be
 +    // used to unpack the older block keys
 +    blockExecutionInfo.setMaskedByteForBlock(maskedByte);
 +    // total number of dimension and measure blocks
 +    blockExecutionInfo
 +        .setTotalNumberDimensionBlock(segmentProperties.getDimensionOrdinalToBlockMapping().size());
 +    blockExecutionInfo
 +        .setTotalNumberOfMeasureBlock(segmentProperties.getMeasuresOrdinalToBlockMapping().size());
++    blockExecutionInfo.setComplexDimensionInfoMap(QueryUtil
++        .getComplexDimensionsMap(updatedQueryDimension,
++            segmentProperties.getDimensionOrdinalToBlockMapping(),
++            segmentProperties.getEachComplexDimColumnValueSize(),
++            queryProperties.columnToDictionayMapping, queryProperties.complexFilterDimension));
 +    // to check whether older block key update is required or not
 +    blockExecutionInfo.setFixedKeyUpdateRequired(
-         blockKeyGenerator.equals(queryProperties.keyStructureInfo.getKeyGenerator()));
++        !blockKeyGenerator.equals(queryProperties.keyStructureInfo.getKeyGenerator()));
 +    IndexKey startIndexKey = null;
 +    IndexKey endIndexKey = null;
 +    if (null != queryModel.getFilterExpressionResolverTree()) {
 +      // loading the filter executer tree for filter evaluation
 +      blockExecutionInfo.setFilterExecuterTree(FilterUtil
-           .getFilterExecuterTree(queryModel.getFilterExpressionResolverTree(), segmentProperties));
++          .getFilterExecuterTree(queryModel.getFilterExpressionResolverTree(), segmentProperties,
++              blockExecutionInfo.getComlexDimensionInfoMap()));
 +      List<IndexKey> listOfStartEndKeys = new ArrayList<IndexKey>(2);
 +      FilterUtil.traverseResolverTreeAndGetStartAndEndKey(segmentProperties,
 +          queryModel.getAbsoluteTableIdentifier(), queryModel.getFilterExpressionResolverTree(),
 +          listOfStartEndKeys);
 +      startIndexKey = listOfStartEndKeys.get(0);
 +      endIndexKey = listOfStartEndKeys.get(1);
 +    } else {
 +      try {
 +        startIndexKey = FilterUtil.prepareDefaultStartIndexKey(segmentProperties);
 +        endIndexKey = FilterUtil.prepareDefaultEndIndexKey(segmentProperties);
 +      } catch (KeyGenException e) {
 +        throw new QueryExecutionException(e);
 +      }
 +    }
 +    blockExecutionInfo.setFileType(
 +        FileFactory.getFileType(queryModel.getAbsoluteTableIdentifier().getStorePath()));
 +    //setting the start index key of the block node
 +    blockExecutionInfo.setStartKey(startIndexKey);
 +    //setting the end index key of the block node
 +    blockExecutionInfo.setEndKey(endIndexKey);
 +    // expression dimensions
 +    List<CarbonDimension> expressionDimensions =
 +        new ArrayList<CarbonDimension>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
 +    // expression measure
 +    List<CarbonMeasure> expressionMeasures =
 +        new ArrayList<CarbonMeasure>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
 +    // setting all the dimension chunk indexes to be read from file
 +    blockExecutionInfo.setAllSelectedDimensionBlocksIndexes(QueryUtil
 +        .getDimensionsBlockIndexes(updatedQueryDimension,
 +            segmentProperties.getDimensionOrdinalToBlockMapping(), expressionDimensions));
 +    // setting all the measure chunk indexes to be read from file
 +    blockExecutionInfo.setAllSelectedMeasureBlocksIndexes(QueryUtil
 +        .getMeasureBlockIndexes(queryModel.getQueryMeasures(), expressionMeasures,
 +            segmentProperties.getMeasuresOrdinalToBlockMapping()));
 +    // setting the key structure info which will be required
 +    // to update the older block key with new key generator
 +    blockExecutionInfo.setKeyStructureInfo(queryProperties.keyStructureInfo);
 +    // setting the size of fixed key column (dictionary column)
 +    blockExecutionInfo.setFixedLengthKeySize(getKeySize(updatedQueryDimension, segmentProperties));
 +    Set<Integer> dictionaryColumnBlockIndex = new HashSet<Integer>();
 +    List<Integer> noDictionaryColumnBlockIndex = new ArrayList<Integer>();
 +    // get the block index to be read from file for query dimension
 +    // for both dictionary columns and no dictionary columns
 +    QueryUtil.fillQueryDimensionsBlockIndexes(updatedQueryDimension,
 +        segmentProperties.getDimensionOrdinalToBlockMapping(), dictionaryColumnBlockIndex,
 +        noDictionaryColumnBlockIndex);
-     int[] queryDictionaruColumnBlockIndexes = ArrayUtils.toPrimitive(
++    int[] queryDictionaryColumnBlockIndexes = ArrayUtils.toPrimitive(
 +        dictionaryColumnBlockIndex.toArray(new Integer[dictionaryColumnBlockIndex.size()]));
 +    // need to sort the dictionary column block indexes, as for all dimensions
 +    // the column key will be filled based on key order
-     Arrays.sort(queryDictionaruColumnBlockIndexes);
-     blockExecutionInfo.setDictionaryColumnBlockIndex(queryDictionaruColumnBlockIndexes);
++    Arrays.sort(queryDictionaryColumnBlockIndexes);
++    blockExecutionInfo.setDictionaryColumnBlockIndex(queryDictionaryColumnBlockIndexes);
 +    // setting the no dictionary column block indexes
 +    blockExecutionInfo.setNoDictionaryBlockIndexes(ArrayUtils.toPrimitive(
 +        noDictionaryColumnBlockIndex.toArray(new Integer[noDictionaryColumnBlockIndex.size()])));
 +    // setting column id to dictionary mapping
 +    blockExecutionInfo.setColumnIdToDcitionaryMapping(queryProperties.columnToDictionayMapping);
 +    // setting each column value size
 +    blockExecutionInfo.setEachColumnValueSize(segmentProperties.getEachDimColumnValueSize());
++    blockExecutionInfo.setComplexColumnParentBlockIndexes(
++        getComplexDimensionParentBlockIndexes(updatedQueryDimension));
 +    try {
 +      // to set the column group and its key structure info, which will be
 +      // used for getting the column group column data in case of the final
 +      // row and in case of dimension aggregation
 +      blockExecutionInfo.setColumnGroupToKeyStructureInfo(
 +          QueryUtil.getColumnGroupKeyStructureInfo(updatedQueryDimension, segmentProperties));
 +    } catch (KeyGenException e) {
 +      throw new QueryExecutionException(e);
 +    }
 +    return blockExecutionInfo;
 +  }
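
Note the negation this merge adds to setFixedKeyUpdateRequired: a block's keys need to be rewritten exactly when its key generator differs from the one the query resolved against, as happens for blocks written before a restructure. A small sketch of that decision, with the key generator reduced to its per-dimension bit widths (illustrative, not the real KeyGenerator comparison):

    import java.util.Arrays;

    public class FixedKeyUpdateDemo {
      // true when the block was written with a different key layout than the query expects
      static boolean fixedKeyUpdateRequired(int[] blockBitWidths, int[] queryBitWidths) {
        return !Arrays.equals(blockBitWidths, queryBitWidths);
      }

      public static void main(String[] args) {
        int[] olderBlock = { 8, 8 };    // layout before a dimension was added
        int[] current = { 8, 8, 16 };   // layout used by the query's key generator
        System.out.println(fixedKeyUpdateRequired(olderBlock, current)); // true: unpack and repack
        System.out.println(fixedKeyUpdateRequired(current, current));    // false: keys usable as-is
      }
    }
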
 +
 +  /**
 +   * This method will be used to get the fixed key length size, which will be
 +   * used to create a row from the column chunks
 +   *
 +   * @param queryDimension    query dimension
 +   * @param blockMetadataInfo block metadata info
 +   * @return key size
 +   */
 +  private int getKeySize(List<QueryDimension> queryDimension, SegmentProperties blockMetadataInfo) {
 +    List<Integer> fixedLengthDimensionOrdinal =
 +        new ArrayList<Integer>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
 +    int counter = 0;
 +    while (counter < queryDimension.size()) {
 +      if (queryDimension.get(counter).getDimension().numberOfChild() > 0) {
 +        counter += queryDimension.get(counter).getDimension().numberOfChild();
 +        continue;
 +      } else if (!CarbonUtil.hasEncoding(queryDimension.get(counter).getDimension().getEncoder(),
 +          Encoding.DICTIONARY)) {
 +        counter++;
 +      } else {
 +        fixedLengthDimensionOrdinal.add(queryDimension.get(counter).getDimension().getKeyOrdinal());
 +        counter++;
 +      }
 +    }
 +    int[] dictionaryColumnOrdinal = ArrayUtils.toPrimitive(
 +        fixedLengthDimensionOrdinal.toArray(new Integer[fixedLengthDimensionOrdinal.size()]));
 +    if (dictionaryColumnOrdinal.length > 0) {
 +      return blockMetadataInfo.getFixedLengthKeySplitter()
 +          .getKeySizeByBlock(dictionaryColumnOrdinal);
 +    }
 +    return 0;
 +  }
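
The loop above skips the children of complex dimensions and every no-dictionary column, then hands the remaining key ordinals to the fixed-length key splitter. Assuming the splitter simply sums a fixed byte width per selected column (a simplification; the real splitter is not shown in this diff), the computation amounts to:

    public class KeySizeDemo {
      // hypothetical simplification of getKeySizeByBlock: sum the fixed byte
      // widths of the selected dictionary columns
      static int keySizeByBlock(int[] bytesPerColumn, int[] selectedOrdinals) {
        int size = 0;
        for (int ordinal : selectedOrdinals) {
          size += bytesPerColumn[ordinal];
        }
        return size;
      }

      public static void main(String[] args) {
        int[] bytesPerColumn = { 2, 4, 1 };  // fixed width of each dictionary column
        int[] selected = { 0, 2 };           // key ordinals of the queried dictionary columns
        System.out.println(keySizeByBlock(bytesPerColumn, selected)); // prints 3
      }
    }
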
 +
 +  /**
 +   * Below method will be used to get the sort information which will be
 +   * required while sorting the data on the dimension columns
 +   *
 +   * @param queryModel query model
 +   * @return Sort infos
 +   * @throws QueryExecutionException if there is a problem while getting the sort info
 +   */
 +  protected SortInfo getSortInfos(QueryModel queryModel) throws QueryExecutionException {
 +
 +    // get the masked byte range for the order by dimensions
 +    int[][] maskedByteRangeForSorting = QueryUtil
 +        .getMaskedByteRangeForSorting(queryModel.getSortDimension(),
 +            queryProperties.keyStructureInfo.getKeyGenerator(),
 +            queryProperties.keyStructureInfo.getMaskByteRanges());
 +    // get masked key for sorting
 +    byte[][] maskedKeyForSorting = QueryUtil.getMaksedKeyForSorting(queryModel.getSortDimension(),
 +        queryProperties.keyStructureInfo.getKeyGenerator(), maskedByteRangeForSorting,
 +        queryProperties.keyStructureInfo.getMaskByteRanges());
 +    // fill sort dimension indexes
 +    queryProperties.sortDimIndexes = QueryUtil
 +        .getSortDimensionIndexes(queryModel.getSortDimension(), queryModel.getQueryDimension());
 +    SortInfo sortInfos = new SortInfo();
 +    sortInfos.setDimensionMaskKeyForSorting(maskedKeyForSorting);
 +    sortInfos.setDimensionSortOrder(queryModel.getSortOrder());
 +    sortInfos.setMaskedByteRangeForSorting(maskedByteRangeForSorting);
 +    sortInfos.setSortDimensionIndex(queryProperties.sortDimIndexes);
 +    sortInfos.setSortDimension(queryModel.getSortDimension());
 +    return sortInfos;
 +  }
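
sortDimIndexes defaults to all zeros in initQuery and is filled here; reading the two comments together, a natural interpretation is one flag per query dimension marking whether it participates in the sort. A hedged sketch of that mapping (QueryUtil.getSortDimensionIndexes itself is not shown in this diff and may differ):

    import java.util.Arrays;
    import java.util.List;

    public class SortDimIndexDemo {
      // flag each query dimension that also appears among the sort dimensions
      static byte[] sortDimensionIndexes(List<String> sortDims, List<String> queryDims) {
        byte[] flags = new byte[queryDims.size()];
        for (int i = 0; i < queryDims.size(); i++) {
          flags[i] = (byte) (sortDims.contains(queryDims.get(i)) ? 1 : 0);
        }
        return flags;
      }

      public static void main(String[] args) {
        List<String> query = Arrays.asList("country", "city", "name");
        List<String> sort = Arrays.asList("city");
        System.out.println(Arrays.toString(sortDimensionIndexes(sort, query))); // [0, 1, 0]
      }
    }
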
 +
 +  /**
 +   * Below method will be used to get the aggregator info for the query
 +   *
 +   * @param queryModel query model
 +   * @param tableBlock table block
 +   * @return aggregator info
 +   */
 +  private AggregatorInfo getAggregatorInfoForBlock(QueryModel queryModel,
 +      AbstractIndex tableBlock) {
 +    // getting the aggregate infos which will be used during aggregation
 +    AggregatorInfo aggregatorInfos = RestructureUtil
 +        .getAggregatorInfos(queryModel.getQueryMeasures(),
 +            tableBlock.getSegmentProperties().getMeasures());
 +    // setting the index of expression in measure aggregators
 +    aggregatorInfos.setExpressionAggregatorStartIndex(queryProperties.aggExpressionStartIndex);
 +    // setting the index of measure columns in measure aggregators
 +    aggregatorInfos.setMeasureAggregatorStartIndex(queryProperties.measureStartIndex);
 +    // setting the measure data types for all the aggregation functions selected
 +    // in the query
 +    aggregatorInfos.setMeasureDataTypes(queryProperties.measureDataTypes);
 +    return aggregatorInfos;
 +  }
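
The two start indexes copied onto the aggregator info follow the ordering spelled out in initQuery: dimension-expression aggregators first, then expression aggregators, then measure aggregators. A toy computation of those offsets under that stated layout (illustrative only; initQuery above derives its actual values from the measure count alone):

    public class AggOffsetsDemo {
      // aggregator array layout: [dim-expression aggs][expression aggs][measure aggs]
      static int aggExpressionStartIndex(int dimExprCount) {
        return dimExprCount;
      }

      static int measureStartIndex(int dimExprCount, int exprCount) {
        return dimExprCount + exprCount;
      }

      public static void main(String[] args) {
        // e.g. 1 dimension expression, 2 expressions, 3 measures
        System.out.println(aggExpressionStartIndex(1)); // 1
        System.out.println(measureStartIndex(1, 2));    // 3
      }
    }
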
 +
++  private int[] getComplexDimensionParentBlockIndexes(List<QueryDimension> queryDimensions) {
++    List<Integer> parentBlockIndexList = new ArrayList<Integer>();
++    for (QueryDimension queryDimension : queryDimensions) {
++      if (CarbonUtil.hasDataType(queryDimension.getDimension().getDataType(),
++          new DataType[] { DataType.ARRAY, DataType.STRUCT, DataType.MAP })) {
++        parentBlockIndexList.add(queryDimension.getDimension().getOrdinal());
++      }
++    }
++    return ArrayUtils
++        .toPrimitive(parentBlockIndexList.toArray(new Integer[parentBlockIndexList.size()]));
++  }
++
 +}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/executor/impl/QueryExecutorProperties.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/executor/impl/QueryExecutorProperties.java
index a004dce,0000000..10fac8d
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/executor/impl/QueryExecutorProperties.java
+++ b/core/src/main/java/org/carbondata/scan/executor/impl/QueryExecutorProperties.java
@@@ -1,78 -1,0 +1,90 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.carbondata.scan.executor.impl;
 +
 +import java.util.List;
 +import java.util.Map;
++import java.util.Set;
 +
 +import org.carbondata.core.cache.dictionary.Dictionary;
 +import org.carbondata.core.carbon.datastore.block.AbstractIndex;
 +import org.carbondata.core.carbon.metadata.datatype.DataType;
++import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension;
++import org.carbondata.core.carbon.querystatistics.QueryStatisticsRecorder;
 +import org.carbondata.scan.executor.infos.KeyStructureInfo;
++import org.carbondata.scan.filter.GenericQueryType;
 +
 +/**
 + * Holds all the properties required for query execution
 + */
 +public class QueryExecutorProperties {
 +
 +  /**
-    * list of blocks in which query will be executed
-    */
-   protected List<AbstractIndex> dataBlocks;
- 
-   /**
 +   * holds the information required for updating the older block
 +   * dictionary keys
 +   */
 +  public KeyStructureInfo keyStructureInfo;
- 
 +  /**
 +   * as we have multiple types of column aggregation, like
 +   * dimension, expression and measure, this will be used for getting the
 +   * measure aggregation start index
 +   */
 +  public int measureStartIndex;
- 
 +  /**
 +   * queries like count(1), count(*), etc. will use this parameter
 +   */
 +  public boolean isFunctionQuery;
- 
 +  /**
 +   * aggExpressionStartIndex
 +   */
 +  public int aggExpressionStartIndex;
- 
 +  /**
 +   * index of the dimension which is present in the order by
 +   * in a query
 +   */
 +  public byte[] sortDimIndexes;
 +
 +  /**
 +   * this will hold the information about the dictionary dimensions,
 +   * which will be used to get the actual column data
 +   */
 +  public Map<String, Dictionary> columnToDictionayMapping;
 +
 +  /**
 +   * Measure datatypes
 +   */
 +  public DataType[] measureDataTypes;
++  /**
++   * mapping from the complex parent column index to its query type
++   */
++  public Map<Integer, GenericQueryType> complexDimensionInfoMap;
++  /**
++   * all the complex dimensions present in the filter
++   */
++  public Set<CarbonDimension> complexFilterDimension;
++  /**
++   * to record the query execution details phase wise
++   */
++  public QueryStatisticsRecorder queryStatisticsRecorder;
++  /**
++   * list of blocks in which query will be executed
++   */
++  protected List<AbstractIndex> dataBlocks;
++
 +}

