carbondata-commits mailing list archives

From chenliang...@apache.org
Subject [46/50] [abbrv] incubator-carbondata git commit: Merge remote-tracking branch 'carbon_master/master' into apache/master
Date Wed, 20 Jul 2016 10:14:14 GMT
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/executor/infos/BlockExecutionInfo.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/executor/infos/BlockExecutionInfo.java
index dc55e46,0000000..c835b1c
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/executor/infos/BlockExecutionInfo.java
+++ b/core/src/main/java/org/carbondata/scan/executor/infos/BlockExecutionInfo.java
@@@ -1,611 -1,0 +1,663 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.carbondata.scan.executor.infos;
 +
 +import java.util.Map;
 +
 +import org.carbondata.core.cache.dictionary.Dictionary;
 +import org.carbondata.core.carbon.datastore.DataRefNode;
 +import org.carbondata.core.carbon.datastore.IndexKey;
 +import org.carbondata.core.carbon.datastore.block.AbstractIndex;
++import org.carbondata.core.carbon.querystatistics.QueryStatisticsRecorder;
 +import org.carbondata.core.datastorage.store.impl.FileFactory.FileType;
 +import org.carbondata.core.keygenerator.KeyGenerator;
 +import org.carbondata.scan.filter.executer.FilterExecuter;
 +
 +/**
 + * Below class will hold all the properties needed during query execution
 + * for one block
 + */
 +public class BlockExecutionInfo {
 +
 +  /**
 +   * block on which query will be executed
 +   */
 +  private AbstractIndex blockIndex;
 +
 +  /**
 +   * each segment's key size can be different; in that case we need to update
 +   * the fixed key with the latest segment key generator. This property tells
 +   * whether such an update is required. If the key size is the same, it is
 +   * not required.
 +   */
 +  private boolean isFixedKeyUpdateRequired;
 +
 +  /**
 +   * in case of a detail + order by query the output records may need to be
 +   * stored on disk, so this flag will be used to check whether we can
 +   * write to the disk or not
 +   */
 +  private boolean isFileBasedQuery;
 +
 +  /**
 +   * id of the query. This will be used to create a directory while writing
 +   * the data file in case of a detail + order by query
 +   */
 +  private String queryId;
 +
 +  /**
 +   * this is to handle a limit query. In case of a detail query the limit is
 +   * pushed down to the executor level, so based on the limit value only that
 +   * many records are processed
 +   */
 +  private int limit;
 +
 +  /**
 +   * stores all the information required for aggregation during query
 +   * execution
 +   */
 +  private AggregatorInfo aggregatorInfo;
 +
 +  /**
 +   * this will be used to get the first tentative block from which query
 +   * execution starts; this is useful in case of a filter query to get the
 +   * start block based on filter values
 +   */
 +  private IndexKey startKey;
 +
 +  /**
 +   * this will be used to get the last tentative block till which scanning
 +   * will be done; this is useful in case of a filter query to get the last
 +   * block based on filter values
 +   */
 +  private IndexKey endKey;
 +
 +  /**
 +   * masked bytes for the block, which will be used to unpack the fixed length
 +   * key; this will be used for updating the older block key with the new
 +   * block key generator
 +   */
 +  private int[] maskedByteForBlock;
 +
 +  /**
 +   * flag to check whether query is detail query or aggregation query
 +   */
 +  private boolean isDetailQuery;
 +
 +  /**
 +   * total number of dimension blocks in the block
 +   */
 +  private int totalNumberDimensionBlock;
 +
 +  /**
 +   * total number of measure blocks in the block
 +   */
 +  private int totalNumberOfMeasureBlock;
 +
 +  /**
 +   * will be used to read the dimension block from file
 +   */
 +  private int[] allSelectedDimensionBlocksIndexes;
 +
 +  /**
 +   * will be used to read the measure block from file
 +   */
 +  private int[] allSelectedMeasureBlocksIndexes;
 +
 +  /**
 +   * this will be used to update the older block fixed length keys with the
 +   * new block fixed length key
 +   */
 +  private KeyStructureInfo keyStructureInfo;
 +
 +  /**
 +   * below holds the information used to sort the data
 +   */
 +  private SortInfo sortInfo;
 +
 +  /**
 +   * first block from which query execution will start
 +   */
 +  private DataRefNode firstDataBlock;
 +
 +  /**
 +   * number of blocks to be scanned in the query
 +   */
 +  private long numberOfBlockToScan;
 +
 +  /**
 +   * key size of the fixed length dimension column
 +   */
 +  private int fixedLengthKeySize;
 +
 +  /**
 +   * dictionary column block indexes based on query
 +   */
 +  private int[] dictionaryColumnBlockIndex;
 +  /**
 +   * no dictionary column block indexes based on the query order
 +   */
 +  private int[] noDictionaryBlockIndexes;
 +
 +  /**
 +   * key generator used for generating the table block fixed length key
 +   */
 +  private KeyGenerator blockKeyGenerator;
 +
 +  /**
 +   * each column value size
 +   */
 +  private int[] eachColumnValueSize;
 +
 +  /**
 +   * partition number
 +   */
 +  private String partitionId;
 +
 +  /**
 +   * column group block index in file to key structure info mapping
 +   */
 +  private Map<Integer, KeyStructureInfo> columnGroupToKeyStructureInfo;
 +
 +  /**
 +   * mapping of a dictionary dimension column id to its dictionary, which
 +   * will be used to get the actual data from the dictionary for aggregation
 +   * and sorting
 +   */
 +  private Map<String, Dictionary> columnIdToDcitionaryMapping;
 +
 +  /**
 +   * filter tree to execute the filter
 +   */
 +  private FilterExecuter filterExecuterTree;
 +
 +  /**
 +   * fileType
 +   */
 +  private FileType fileType;
 +
 +  /**
 +   * whether it needs only raw byte records without aggregation.
 +   */
 +  private boolean isRawRecordDetailQuery;
 +
 +  /**
 +   * whether dimensions exist in query.
 +   */
 +  private boolean isDimensionsExistInQuery;
 +
 +  /**
++   * mapping of complex dimension parent block index to its query type
++   */
++  private Map<Integer, GenericQueryType> complexParentIndexToQueryMap;
++
++  /**
++   * complex dimension parent block indexes
++   */
++  private int[] complexColumnParentBlockIndexes;
++
++  /**
++   * to record the statistics
++   */
++  private QueryStatisticsRecorder statisticsRecorder;
++
++  /**
 +   * @return the tableBlock
 +   */
 +  public AbstractIndex getDataBlock() {
 +    return blockIndex;
 +  }
 +
 +  /**
 +   * @param blockIndex the tableBlock to set
 +   */
 +  public void setDataBlock(AbstractIndex blockIndex) {
 +    this.blockIndex = blockIndex;
 +  }
 +
 +  /**
 +   * @return the isFixedKeyUpdateRequired
 +   */
 +  public boolean isFixedKeyUpdateRequired() {
 +    return isFixedKeyUpdateRequired;
 +  }
 +
 +  /**
 +   * @param isFixedKeyUpdateRequired the isFixedKeyUpdateRequired to set
 +   */
 +  public void setFixedKeyUpdateRequired(boolean isFixedKeyUpdateRequired) {
 +    this.isFixedKeyUpdateRequired = isFixedKeyUpdateRequired;
 +  }
 +
 +  /**
 +   * @return the isFileBasedQuery
 +   */
 +  public boolean isFileBasedQuery() {
 +    return isFileBasedQuery;
 +  }
 +
 +  /**
 +   * @param isFileBasedQuery the isFileBasedQuery to set
 +   */
 +  public void setFileBasedQuery(boolean isFileBasedQuery) {
 +    this.isFileBasedQuery = isFileBasedQuery;
 +  }
 +
 +  /**
 +   * @return the queryId
 +   */
 +  public String getQueryId() {
 +    return queryId;
 +  }
 +
 +  /**
 +   * @param queryId the queryId to set
 +   */
 +  public void setQueryId(String queryId) {
 +    this.queryId = queryId;
 +  }
 +
 +  /**
 +   * @return the limit
 +   */
 +  public int getLimit() {
 +    return limit;
 +  }
 +
 +  /**
 +   * @param limit the limit to set
 +   */
 +  public void setLimit(int limit) {
 +    this.limit = limit;
 +  }
 +
 +  /**
 +   * @return the aggregatorInfos
 +   */
 +  public AggregatorInfo getAggregatorInfo() {
 +    return aggregatorInfo;
 +  }
 +
 +  /**
 +   * @param aggregatorInfo the aggregatorInfos to set
 +   */
 +  public void setAggregatorInfo(AggregatorInfo aggregatorInfo) {
 +    this.aggregatorInfo = aggregatorInfo;
 +  }
 +
 +  /**
 +   * @return the startKey
 +   */
 +  public IndexKey getStartKey() {
 +    return startKey;
 +  }
 +
 +  /**
 +   * @param startKey the startKey to set
 +   */
 +  public void setStartKey(IndexKey startKey) {
 +    this.startKey = startKey;
 +  }
 +
 +  /**
 +   * @return the endKey
 +   */
 +  public IndexKey getEndKey() {
 +    return endKey;
 +  }
 +
 +  /**
 +   * @param endKey the endKey to set
 +   */
 +  public void setEndKey(IndexKey endKey) {
 +    this.endKey = endKey;
 +  }
 +
 +  /**
 +   * @return the maskedByteForBlock
 +   */
 +  public int[] getMaskedByteForBlock() {
 +    return maskedByteForBlock;
 +  }
 +
 +  /**
 +   * @param maskedByteForBlock the maskedByteForBlock to set
 +   */
 +  public void setMaskedByteForBlock(int[] maskedByteForBlock) {
 +    this.maskedByteForBlock = maskedByteForBlock;
 +  }
 +
 +  /**
 +   * @return the isDetailQuery
 +   */
 +  public boolean isDetailQuery() {
 +    return isDetailQuery;
 +  }
 +
 +  /**
 +   * @param isDetailQuery the isDetailQuery to set
 +   */
 +  public void setDetailQuery(boolean isDetailQuery) {
 +    this.isDetailQuery = isDetailQuery;
 +  }
 +
 +  /**
 +   * @return the totalNumberDimensionBlock
 +   */
 +  public int getTotalNumberDimensionBlock() {
 +    return totalNumberDimensionBlock;
 +  }
 +
 +  /**
 +   * @param totalNumberDimensionBlock the totalNumberDimensionBlock to set
 +   */
 +  public void setTotalNumberDimensionBlock(int totalNumberDimensionBlock) {
 +    this.totalNumberDimensionBlock = totalNumberDimensionBlock;
 +  }
 +
 +  /**
 +   * @return the totalNumberOfMeasureBlock
 +   */
 +  public int getTotalNumberOfMeasureBlock() {
 +    return totalNumberOfMeasureBlock;
 +  }
 +
 +  /**
 +   * @param totalNumberOfMeasureBlock the totalNumberOfMeasureBlock to set
 +   */
 +  public void setTotalNumberOfMeasureBlock(int totalNumberOfMeasureBlock) {
 +    this.totalNumberOfMeasureBlock = totalNumberOfMeasureBlock;
 +  }
 +
 +  /**
 +   * @return the allSelectedDimensionBlocksIndexes
 +   */
 +  public int[] getAllSelectedDimensionBlocksIndexes() {
 +    return allSelectedDimensionBlocksIndexes;
 +  }
 +
 +  /**
 +   * @param allSelectedDimensionBlocksIndexes the allSelectedDimensionBlocksIndexes to set
 +   */
 +  public void setAllSelectedDimensionBlocksIndexes(int[] allSelectedDimensionBlocksIndexes) {
 +    this.allSelectedDimensionBlocksIndexes = allSelectedDimensionBlocksIndexes;
 +  }
 +
 +  /**
 +   * @return the allSelectedMeasureBlocksIndexes
 +   */
 +  public int[] getAllSelectedMeasureBlocksIndexes() {
 +    return allSelectedMeasureBlocksIndexes;
 +  }
 +
 +  /**
 +   * @param allSelectedMeasureBlocksIndexes the allSelectedMeasureBlocksIndexes to set
 +   */
 +  public void setAllSelectedMeasureBlocksIndexes(int[] allSelectedMeasureBlocksIndexes) {
 +    this.allSelectedMeasureBlocksIndexes = allSelectedMeasureBlocksIndexes;
 +  }
 +
 +  /**
 +   * @return the restructureInfos
 +   */
 +  public KeyStructureInfo getKeyStructureInfo() {
 +    return keyStructureInfo;
 +  }
 +
 +  /**
 +   * @param keyStructureInfo the restructureInfos to set
 +   */
 +  public void setKeyStructureInfo(KeyStructureInfo keyStructureInfo) {
 +    this.keyStructureInfo = keyStructureInfo;
 +  }
 +
 +  /**
 +   * @return the sortInfos
 +   */
 +  public SortInfo getSortInfo() {
 +    return sortInfo;
 +  }
 +
 +  /**
 +   * @param sortInfo the sortInfos to set
 +   */
 +  public void setSortInfo(SortInfo sortInfo) {
 +    this.sortInfo = sortInfo;
 +  }
 +
 +  /**
 +   * @return the firstDataBlock
 +   */
 +  public DataRefNode getFirstDataBlock() {
 +    return firstDataBlock;
 +  }
 +
 +  /**
 +   * @param firstDataBlock the firstDataBlock to set
 +   */
 +  public void setFirstDataBlock(DataRefNode firstDataBlock) {
 +    this.firstDataBlock = firstDataBlock;
 +  }
 +
 +  /**
 +   * @return the numberOfBlockToScan
 +   */
 +  public long getNumberOfBlockToScan() {
 +    return numberOfBlockToScan;
 +  }
 +
 +  /**
 +   * @param numberOfBlockToScan the numberOfBlockToScan to set
 +   */
 +  public void setNumberOfBlockToScan(long numberOfBlockToScan) {
 +    this.numberOfBlockToScan = numberOfBlockToScan;
 +  }
 +
 +  /**
 +   * @return the fixedLengthKeySize
 +   */
 +  public int getFixedLengthKeySize() {
 +    return fixedLengthKeySize;
 +  }
 +
 +  /**
 +   * @param fixedLengthKeySize the fixedLengthKeySize to set
 +   */
 +  public void setFixedLengthKeySize(int fixedLengthKeySize) {
 +    this.fixedLengthKeySize = fixedLengthKeySize;
 +  }
 +
 +  /**
 +   * @return the filterEvaluatorTree
 +   */
 +  public FilterExecuter getFilterExecuterTree() {
 +    return filterExecuterTree;
 +  }
 +
 +  /**
 +   * @param filterExecuterTree the filterEvaluatorTree to set
 +   */
 +  public void setFilterExecuterTree(FilterExecuter filterExecuterTree) {
 +    this.filterExecuterTree = filterExecuterTree;
 +  }
 +
 +  /**
 +   * @return the tableBlockKeyGenerator
 +   */
 +  public KeyGenerator getBlockKeyGenerator() {
 +    return blockKeyGenerator;
 +  }
 +
 +  /**
 +   * @param tableBlockKeyGenerator the tableBlockKeyGenerator to set
 +   */
 +  public void setBlockKeyGenerator(KeyGenerator tableBlockKeyGenerator) {
 +    this.blockKeyGenerator = tableBlockKeyGenerator;
 +  }
 +
 +  /**
 +   * @return the eachColumnValueSize
 +   */
 +  public int[] getEachColumnValueSize() {
 +    return eachColumnValueSize;
 +  }
 +
 +  /**
 +   * @param eachColumnValueSize the eachColumnValueSize to set
 +   */
 +  public void setEachColumnValueSize(int[] eachColumnValueSize) {
 +    this.eachColumnValueSize = eachColumnValueSize;
 +  }
 +
 +  /**
 +   * @return the partitionId
 +   */
 +  public String getPartitionId() {
 +    return partitionId;
 +  }
 +
 +  /**
 +   * @param partitionId the partitionId to set
 +   */
 +  public void setPartitionId(String partitionId) {
 +    this.partitionId = partitionId;
 +  }
 +
 +  /**
 +   * @return the dictionaryColumnBlockIndex
 +   */
 +  public int[] getDictionaryColumnBlockIndex() {
 +    return dictionaryColumnBlockIndex;
 +  }
 +
 +  /**
 +   * @param dictionaryColumnBlockIndex the dictionaryColumnBlockIndex to set
 +   */
 +  public void setDictionaryColumnBlockIndex(int[] dictionaryColumnBlockIndex) {
 +    this.dictionaryColumnBlockIndex = dictionaryColumnBlockIndex;
 +  }
 +
 +  /**
 +   * @return the noDictionaryBlockIndexes
 +   */
 +  public int[] getNoDictionaryBlockIndexes() {
 +    return noDictionaryBlockIndexes;
 +  }
 +
 +  /**
 +   * @param noDictionaryBlockIndexes the noDictionaryBlockIndexes to set
 +   */
 +  public void setNoDictionaryBlockIndexes(int[] noDictionaryBlockIndexes) {
 +    this.noDictionaryBlockIndexes = noDictionaryBlockIndexes;
 +  }
 +
 +  /**
 +   * @return the columnGroupToKeyStructureInfo
 +   */
 +  public Map<Integer, KeyStructureInfo> getColumnGroupToKeyStructureInfo() {
 +    return columnGroupToKeyStructureInfo;
 +  }
 +
 +  /**
 +   * @param columnGroupToKeyStructureInfo the columnGroupToKeyStructureInfo to set
 +   */
 +  public void setColumnGroupToKeyStructureInfo(
 +      Map<Integer, KeyStructureInfo> columnGroupToKeyStructureInfo) {
 +    this.columnGroupToKeyStructureInfo = columnGroupToKeyStructureInfo;
 +  }
 +
 +  /**
 +   * @return the columnIdToDcitionaryMapping
 +   */
 +  public Map<String, Dictionary> getColumnIdToDcitionaryMapping() {
 +    return columnIdToDcitionaryMapping;
 +  }
 +
 +  /**
 +   * @param columnIdToDcitionaryMapping the columnIdToDcitionaryMapping to set
 +   */
 +  public void setColumnIdToDcitionaryMapping(Map<String, Dictionary> columnIdToDcitionaryMapping) {
 +    this.columnIdToDcitionaryMapping = columnIdToDcitionaryMapping;
 +  }
 +
 +  /**
 +   * @return the fileType
 +   */
 +  public FileType getFileType() {
 +    return fileType;
 +  }
 +
 +  /**
 +   * @param fileType the fileType to set
 +   */
 +  public void setFileType(FileType fileType) {
 +    this.fileType = fileType;
 +  }
 +
 +  public boolean isRawRecordDetailQuery() {
 +    return isRawRecordDetailQuery;
 +  }
 +
 +  public void setRawRecordDetailQuery(boolean rawRecordDetailQuery) {
 +    isRawRecordDetailQuery = rawRecordDetailQuery;
 +  }
 +
 +  public boolean isDimensionsExistInQuery() {
 +    return isDimensionsExistInQuery;
 +  }
 +
 +  public void setDimensionsExistInQuery(boolean dimensionsExistInQuery) {
 +    isDimensionsExistInQuery = dimensionsExistInQuery;
 +  }
++
++  /**
++   * @return the complexParentIndexToQueryMap
++   */
++  public Map<Integer, GenericQueryType> getComlexDimensionInfoMap() {
++    return complexParentIndexToQueryMap;
++  }
++
++  /**
++   * @param complexDimensionInfoMap the complexParentIndexToQueryMap to set
++   */
++  public void setComplexDimensionInfoMap(Map<Integer, GenericQueryType> complexDimensionInfoMap) {
++    this.complexParentIndexToQueryMap = complexDimensionInfoMap;
++  }
++
++  /**
++   * @return the complexColumnParentBlockIndexes
++   */
++  public int[] getComplexColumnParentBlockIndexes() {
++    return complexColumnParentBlockIndexes;
++  }
++
++  /**
++   * @param complexColumnParentBlockIndexes the complexColumnParentBlockIndexes to set
++   */
++  public void setComplexColumnParentBlockIndexes(int[] complexColumnParentBlockIndexes) {
++    this.complexColumnParentBlockIndexes = complexColumnParentBlockIndexes;
++  }
++
++  public QueryStatisticsRecorder getStatisticsRecorder() {
++    return statisticsRecorder;
++  }
++
++  public void setStatisticsRecorder(QueryStatisticsRecorder statisticsRecorder) {
++    this.statisticsRecorder = statisticsRecorder;
++  }
 +}
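
A minimal sketch of how an executor might populate this holder before scanning a block; only the setter names come from the class above, and every value below (query id, limit, block indexes, key size) is a hypothetical placeholder, not taken from this commit.

  import org.carbondata.scan.executor.infos.BlockExecutionInfo;

  public class BlockExecutionInfoExample {
    public static void main(String[] args) {
      // All values are hypothetical placeholders; only the setter names
      // come from the BlockExecutionInfo class in this diff.
      BlockExecutionInfo blockInfo = new BlockExecutionInfo();
      blockInfo.setQueryId("query_123");
      blockInfo.setLimit(1000);                  // limit pushed down to the executor
      blockInfo.setDetailQuery(true);
      blockInfo.setFixedLengthKeySize(8);        // size of the fixed length mdkey in bytes
      blockInfo.setNumberOfBlockToScan(1L);
      blockInfo.setAllSelectedDimensionBlocksIndexes(new int[] {0, 1});
      blockInfo.setAllSelectedMeasureBlocksIndexes(new int[] {0});
      blockInfo.setDictionaryColumnBlockIndex(new int[] {0});
      blockInfo.setNoDictionaryBlockIndexes(new int[] {1});
      System.out.println("limit=" + blockInfo.getLimit()
          + ", blocksToScan=" + blockInfo.getNumberOfBlockToScan());
    }
  }
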

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/executor/util/QueryUtil.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/executor/util/QueryUtil.java
index d3423b0,0000000..34760b6
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/executor/util/QueryUtil.java
+++ b/core/src/main/java/org/carbondata/scan/executor/util/QueryUtil.java
@@@ -1,755 -1,0 +1,931 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.carbondata.scan.executor.util;
 +
 +import java.util.ArrayList;
 +import java.util.Arrays;
 +import java.util.Collections;
 +import java.util.Comparator;
 +import java.util.HashMap;
 +import java.util.HashSet;
 +import java.util.Iterator;
 +import java.util.List;
 +import java.util.Map;
 +import java.util.Map.Entry;
 +import java.util.Set;
 +import java.util.TreeSet;
 +
 +import org.carbondata.core.cache.Cache;
 +import org.carbondata.core.cache.CacheProvider;
 +import org.carbondata.core.cache.CacheType;
 +import org.carbondata.core.cache.dictionary.Dictionary;
 +import org.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
 +import org.carbondata.core.carbon.AbsoluteTableIdentifier;
 +import org.carbondata.core.carbon.CarbonTableIdentifier;
 +import org.carbondata.core.carbon.datastore.block.SegmentProperties;
 +import org.carbondata.core.carbon.metadata.CarbonMetadata;
++import org.carbondata.core.carbon.metadata.datatype.DataType;
 +import org.carbondata.core.carbon.metadata.encoder.Encoding;
 +import org.carbondata.core.carbon.metadata.schema.table.CarbonTable;
 +import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension;
 +import org.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure;
 +import org.carbondata.core.constants.CarbonCommonConstants;
 +import org.carbondata.core.keygenerator.KeyGenException;
 +import org.carbondata.core.keygenerator.KeyGenerator;
 +import org.carbondata.core.util.CarbonUtil;
 +import org.carbondata.core.util.CarbonUtilException;
++import org.carbondata.query.complex.querytypes.ArrayQueryType;
++import org.carbondata.query.complex.querytypes.PrimitiveQueryType;
++import org.carbondata.query.complex.querytypes.StructQueryType;
 +import org.carbondata.scan.executor.exception.QueryExecutionException;
 +import org.carbondata.scan.executor.infos.KeyStructureInfo;
++import org.carbondata.scan.expression.ColumnExpression;
++import org.carbondata.scan.expression.Expression;
++import org.carbondata.scan.expression.logical.BinaryLogicalExpression;
++import org.carbondata.scan.filter.GenericQueryType;
++import org.carbondata.scan.filter.resolver.FilterResolverIntf;
 +import org.carbondata.scan.filter.resolver.resolverinfo.DimColumnResolvedFilterInfo;
 +import org.carbondata.scan.model.QueryDimension;
 +import org.carbondata.scan.model.QueryMeasure;
 +import org.carbondata.scan.model.QueryModel;
 +
 +import org.apache.commons.lang3.ArrayUtils;
 +
 +/**
 + * Utility class for query execution
 + */
 +public class QueryUtil {
 +
 +  /**
 +   * Below method will be used to get the masked byte range based on the query
 +   * dimension. It will give the range in the mdkey. This will be used to get
 +   * the actual key array from masked mdkey
 +   *
 +   * @param queryDimensions query dimension selected in query
 +   * @param keyGenerator    key generator
 +   * @return masked key
 +   */
 +  public static int[] getMaskedByteRange(List<QueryDimension> queryDimensions,
 +      KeyGenerator keyGenerator) {
 +    Set<Integer> byteRangeSet = new TreeSet<Integer>();
 +    int[] byteRange = null;
 +    for (int i = 0; i < queryDimensions.size(); i++) {
 +
 +      // no dictionary columns and complex type columns are not part
 +      // of the mdkey, so those dimensions are not considered while
 +      // calculating the range
 +      if (queryDimensions.get(i).getDimension().getKeyOrdinal() == -1) {
 +        continue;
 +      }
 +      // get the offset of the dimension in the mdkey
 +      byteRange =
 +          keyGenerator.getKeyByteOffsets(queryDimensions.get(i).getDimension().getKeyOrdinal());
 +      for (int j = byteRange[0]; j <= byteRange[1]; j++) {
 +        byteRangeSet.add(j);
 +      }
 +    }
 +    int[] maksedByteRange = new int[byteRangeSet.size()];
 +    int index = 0;
 +    Iterator<Integer> iterator = byteRangeSet.iterator();
 +    // add the masked byte range
 +    while (iterator.hasNext()) {
 +      maksedByteRange[index++] = iterator.next();
 +    }
 +    return maksedByteRange;
 +  }
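
A minimal, self-contained sketch of what this range computation produces, assuming two selected dictionary dimensions whose key byte offsets are {2, 3} and {6, 6}; these values are hypothetical, and keyGenerator.getKeyByteOffsets is only mimicked with a plain array.

  import java.util.Set;
  import java.util.TreeSet;

  public class MaskedByteRangeExample {
    public static void main(String[] args) {
      // Hypothetical key byte offsets for two selected dictionary dimensions,
      // in the {start, end} form getKeyByteOffsets returns.
      int[][] byteOffsets = { {2, 3}, {6, 6} };
      Set<Integer> byteRangeSet = new TreeSet<Integer>();
      for (int[] range : byteOffsets) {
        for (int j = range[0]; j <= range[1]; j++) {
          byteRangeSet.add(j);
        }
      }
      // flatten the sorted offsets exactly as getMaskedByteRange does
      int[] maskedByteRange = new int[byteRangeSet.size()];
      int index = 0;
      for (Integer offset : byteRangeSet) {
        maskedByteRange[index++] = offset;
      }
      // prints [2, 3, 6]
      System.out.println(java.util.Arrays.toString(maskedByteRange));
    }
  }
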
 +
 +  public static int[] getMaskedByteRangeBasedOrdinal(List<Integer> ordinals,
 +      KeyGenerator keyGenerator) {
 +    Set<Integer> byteRangeSet = new TreeSet<Integer>();
 +    int[] byteRange = null;
 +    for (int i = 0; i < ordinals.size(); i++) {
 +
 +      // get the offset of the dimension in the mdkey
 +      byteRange = keyGenerator.getKeyByteOffsets(ordinals.get(i));
 +      for (int j = byteRange[0]; j <= byteRange[1]; j++) {
 +        byteRangeSet.add(j);
 +      }
 +    }
 +    int[] maksedByteRange = new int[byteRangeSet.size()];
 +    int index = 0;
 +    Iterator<Integer> iterator = byteRangeSet.iterator();
 +    // add the masked byte range
 +    while (iterator.hasNext()) {
 +      maksedByteRange[index++] = iterator.next();
 +    }
 +    return maksedByteRange;
 +  }
 +
 +  /**
 +   * Below method will return the max key based on the dimension ordinal
 +   *
 +   * @param keyOrdinalList
 +   * @param generator
 +   * @return
 +   * @throws KeyGenException
 +   */
 +  public static byte[] getMaxKeyBasedOnOrinal(List<Integer> keyOrdinalList, KeyGenerator generator)
 +      throws KeyGenException {
 +    long[] max = new long[generator.getDimCount()];
 +    Arrays.fill(max, 0L);
 +
 +    for (int i = 0; i < keyOrdinalList.size(); i++) {
 +      // adding for dimension which is selected in query
 +      max[keyOrdinalList.get(i)] = Long.MAX_VALUE;
 +    }
 +    return generator.generateKey(max);
 +  }
 +
 +  /**
 +   * To get the max key based on dimensions. i.e. all other dimensions will be
 +   * set to 0 bits and the required query dimension will be masked with all
 +   * LONG.MAX so that we can mask key and then compare while aggregating This
 +   * can be useful during filter query when only few dimensions were selected
 +   * out of row group
 +   *
 +   * @param queryDimensions dimension selected in query
 +   * @param generator       key generator
 +   * @return max key for dimension
 +   * @throws KeyGenException if any problem while generating the key
 +   */
 +  public static byte[] getMaxKeyBasedOnDimensions(List<QueryDimension> queryDimensions,
 +      KeyGenerator generator) throws KeyGenException {
 +    long[] max = new long[generator.getDimCount()];
 +    Arrays.fill(max, 0L);
 +
 +    for (int i = 0; i < queryDimensions.size(); i++) {
 +      // no dictionary columns and complex type columns are not part
 +      // of the mdkey, so those dimensions are not considered while
 +      // calculating the range
 +      if (queryDimensions.get(i).getDimension().getKeyOrdinal() == -1) {
 +        continue;
 +      }
 +      // adding for dimension which is selected in query
 +      max[queryDimensions.get(i).getDimension().getKeyOrdinal()] = Long.MAX_VALUE;
 +    }
 +
 +    return generator.generateKey(max);
 +  }
 +
 +  /**
 +   * Below method will be used to get the masked key for query
 +   *
 +   * @param keySize         size of the masked key
 +   * @param maskedKeyRanges masked byte range
 +   * @return masked bytes
 +   */
 +  public static int[] getMaskedByte(int keySize, int[] maskedKeyRanges) {
 +    int[] maskedKey = new int[keySize];
 +    // all the non-selected dimensions will be filled with -1
 +    Arrays.fill(maskedKey, -1);
 +    for (int i = 0; i < maskedKeyRanges.length; i++) {
 +      maskedKey[maskedKeyRanges[i]] = i;
 +    }
 +    return maskedKey;
 +  }
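
Continuing with the same hypothetical numbers, the sketch below shows the lookup table getMaskedByte builds: each mdkey byte position maps to its position inside the masked key, and byte positions of non-selected dimensions stay -1.

  import java.util.Arrays;

  public class MaskedByteExample {
    public static void main(String[] args) {
      // Hypothetical inputs: an 8-byte mdkey and the masked byte range [2, 3, 6]
      // produced in the previous sketch.
      int keySize = 8;
      int[] maskedKeyRanges = {2, 3, 6};
      int[] maskedKey = new int[keySize];
      Arrays.fill(maskedKey, -1);            // non-selected byte positions stay -1
      for (int i = 0; i < maskedKeyRanges.length; i++) {
        maskedKey[maskedKeyRanges[i]] = i;   // byte 2 -> 0, byte 3 -> 1, byte 6 -> 2
      }
      // prints [-1, -1, 0, 1, -1, -1, 2, -1]
      System.out.println(Arrays.toString(maskedKey));
    }
  }
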
 +
 +  /**
 +   * Below method will be used to get the dimension block index in file based
 +   * on query dimension
 +   *
 +   * @param queryDimensions                query dimension
 +   * @param dimensionOrdinalToBlockMapping mapping of dimension block in file to query dimension
 +   * @return block index of file
 +   */
 +  public static int[] getDimensionsBlockIndexes(List<QueryDimension> queryDimensions,
 +      Map<Integer, Integer> dimensionOrdinalToBlockMapping,
 +      List<CarbonDimension> customAggregationDimension) {
 +    // using set as in row group columns will point to same block
 +    Set<Integer> dimensionBlockIndex = new HashSet<Integer>();
++    int blockIndex = 0;
 +    for (int i = 0; i < queryDimensions.size(); i++) {
-       dimensionBlockIndex.add(
-           dimensionOrdinalToBlockMapping.get(queryDimensions.get(i).getDimension().getOrdinal()));
++      blockIndex =
++          dimensionOrdinalToBlockMapping.get(queryDimensions.get(i).getDimension().getOrdinal());
++      dimensionBlockIndex.add(blockIndex);
++      addChildrenBlockIndex(blockIndex, dimensionBlockIndex, queryDimensions.get(i).getDimension());
 +    }
 +    for (int i = 0; i < customAggregationDimension.size(); i++) {
-       dimensionBlockIndex
-           .add(dimensionOrdinalToBlockMapping.get(customAggregationDimension.get(i).getOrdinal()));
++      blockIndex =
++          dimensionOrdinalToBlockMapping.get(customAggregationDimension.get(i).getOrdinal());
++      dimensionBlockIndex.add(blockIndex);
++      addChildrenBlockIndex(blockIndex, dimensionBlockIndex, customAggregationDimension.get(i));
 +    }
 +    return ArrayUtils
 +        .toPrimitive(dimensionBlockIndex.toArray(new Integer[dimensionBlockIndex.size()]));
 +  }
 +
 +  /**
++   * Below method will be used to add the children block indexes;
++   * this is basically for complex dimensions, which will have children
++   *
++   * @param startBlockIndex start block index
++   * @param blockIndexList  block index list
++   * @param dimension       parent dimension
++   */
++  private static void addChildrenBlockIndex(int startBlockIndex, Set<Integer> blockIndexList,
++      CarbonDimension dimension) {
++    for (int i = 0; i < dimension.numberOfChild(); i++) {
++      blockIndexList.add(++startBlockIndex);
++      addChildrenBlockIndex(startBlockIndex, blockIndexList,
++          dimension.getListOfChildDimensions().get(i));
++    }
++  }
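
To illustrate the recursion, the standalone sketch below uses a hypothetical Node type in place of CarbonDimension (whose construction needs schema objects not shown in this diff) and shows how a parent at block index 4 with a nested child layout ends up selecting blocks 4 through 7.

  import java.util.ArrayList;
  import java.util.List;
  import java.util.Set;
  import java.util.TreeSet;

  public class ChildrenBlockIndexExample {

    // Hypothetical stand-in for CarbonDimension, holding only the child list.
    static class Node {
      final List<Node> children = new ArrayList<Node>();
    }

    // Mirrors addChildrenBlockIndex: each child (depth first) gets the next index.
    static void addChildren(int startBlockIndex, Set<Integer> blockIndexList, Node dimension) {
      for (Node child : dimension.children) {
        blockIndexList.add(++startBlockIndex);
        addChildren(startBlockIndex, blockIndexList, child);
      }
    }

    public static void main(String[] args) {
      // parent with two children, the second child itself having one child
      Node parent = new Node();
      Node first = new Node();
      Node second = new Node();
      second.children.add(new Node());
      parent.children.add(first);
      parent.children.add(second);

      Set<Integer> blockIndexes = new TreeSet<Integer>();
      int parentBlockIndex = 4;       // hypothetical block index of the parent column
      blockIndexes.add(parentBlockIndex);
      addChildren(parentBlockIndex, blockIndexes, parent);
      // prints [4, 5, 6, 7]
      System.out.println(blockIndexes);
    }
  }
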
++
++  /**
 +   * Below method will be used to get the dictionary mapping for all the
 +   * dictionary encoded dimension present in the query
 +   *
 +   * @param queryDimensions            query dimension present in the query this will be used to
 +   *                                   convert the result from surrogate key to actual data
 +   * @param absoluteTableIdentifier    absolute table identifier
 +   * @return dimension unique id to its dictionary map
 +   * @throws QueryExecutionException
 +   */
 +  public static Map<String, Dictionary> getDimensionDictionaryDetail(
 +      List<QueryDimension> queryDimensions,
++      Set<CarbonDimension> filterComplexDimensions,
 +      AbsoluteTableIdentifier absoluteTableIdentifier) throws QueryExecutionException {
 +    // to store the dimension unique column id list; this is required as a
 +    // dimension can be present in the query dimensions and an aggregation
 +    // function may also be applied on the same dimension, so we need to get
 +    // only one instance of the dictionary.
 +    // direct dictionary columns are skipped only for the dictionary lookup
 +    Set<String> dictionaryDimensionFromQuery = new HashSet<String>();
 +    for (int i = 0; i < queryDimensions.size(); i++) {
 +      List<Encoding> encodingList = queryDimensions.get(i).getDimension().getEncoder();
++      // TODO need to remove the data type check for parent column in complex type no need to
++      // write encoding dictionary
 +      if (CarbonUtil.hasEncoding(encodingList, Encoding.DICTIONARY) && !CarbonUtil
 +          .hasEncoding(encodingList, Encoding.DIRECT_DICTIONARY)) {
-         dictionaryDimensionFromQuery.add(queryDimensions.get(i).getDimension().getColumnId());
++
++        if (queryDimensions.get(i).getDimension().numberOfChild() == 0) {
++          dictionaryDimensionFromQuery.add(queryDimensions.get(i).getDimension().getColumnId());
++        }
++        if (queryDimensions.get(i).getDimension().numberOfChild() > 0) {
++          getChildDimensionDictionaryDetail(queryDimensions.get(i).getDimension(),
++              dictionaryDimensionFromQuery);
++        }
 +      }
 +    }
++    Iterator<CarbonDimension> iterator = filterComplexDimensions.iterator();
++    while (iterator.hasNext()) {
++      getChildDimensionDictionaryDetail(iterator.next(), dictionaryDimensionFromQuery);
++    }
 +    // converting to a list as the exposed api needs a list, which I think
 +    // is not correct
 +    List<String> dictionaryColumnIdList =
 +        new ArrayList<String>(dictionaryDimensionFromQuery.size());
 +    dictionaryColumnIdList.addAll(dictionaryDimensionFromQuery);
 +    return getDictionaryMap(dictionaryColumnIdList, absoluteTableIdentifier);
 +  }
 +
 +  /**
++   * Below method will be used to fill the children dimension column ids
++   *
++   * @param queryDimensions              query dimension
++   * @param dictionaryDimensionFromQuery dictionary dimension for query
++   */
++  private static void getChildDimensionDictionaryDetail(CarbonDimension queryDimensions,
++      Set<String> dictionaryDimensionFromQuery) {
++    for (int j = 0; j < queryDimensions.numberOfChild(); j++) {
++      List<Encoding> encodingList = queryDimensions.getListOfChildDimensions().get(j).getEncoder();
++      if (queryDimensions.getListOfChildDimensions().get(j).numberOfChild() > 0) {
++        getChildDimensionDictionaryDetail(queryDimensions.getListOfChildDimensions().get(j),
++            dictionaryDimensionFromQuery);
++      } else if(!CarbonUtil.hasEncoding(encodingList, Encoding.DIRECT_DICTIONARY)) {
++        dictionaryDimensionFromQuery
++            .add(queryDimensions.getListOfChildDimensions().get(j).getColumnId());
++      }
++    }
++  }
++
++  /**
 +   * Below method will be used to get the column id to its dictionary mapping
 +   *
 +   * @param dictionaryColumnIdList  dictionary column list
 +   * @param absoluteTableIdentifier absolute table identifier
 +   * @return dictionary mapping
 +   * @throws QueryExecutionException
 +   */
 +  private static Map<String, Dictionary> getDictionaryMap(List<String> dictionaryColumnIdList,
 +      AbsoluteTableIdentifier absoluteTableIdentifier) throws QueryExecutionException {
 +    // this for dictionary unique identifier
 +    List<DictionaryColumnUniqueIdentifier> dictionaryColumnUniqueIdentifiers =
 +        getDictionaryColumnUniqueIdentifierList(dictionaryColumnIdList,
 +            absoluteTableIdentifier.getCarbonTableIdentifier());
 +    CacheProvider cacheProvider = CacheProvider.getInstance();
 +    Cache forwardDictionaryCache = cacheProvider
 +        .createCache(CacheType.FORWARD_DICTIONARY, absoluteTableIdentifier.getStorePath());
 +    List<Dictionary> columnDictionaryList = null;
 +    try {
 +      columnDictionaryList = forwardDictionaryCache.getAll(dictionaryColumnUniqueIdentifiers);
 +    } catch (CarbonUtilException e) {
 +      throw new QueryExecutionException(e);
 +    }
 +    Map<String, Dictionary> columnDictionaryMap = new HashMap<>(columnDictionaryList.size());
 +    for (int i = 0; i < dictionaryColumnUniqueIdentifiers.size(); i++) {
 +      // TODO: null check for column dictionary, if cache size is less it
 +      // might return null here, in that case throw exception
 +      columnDictionaryMap.put(dictionaryColumnIdList.get(i), columnDictionaryList.get(i));
 +    }
 +    return columnDictionaryMap;
 +  }
 +
 +  /**
 +   * Below method will be used to get the dictionary column unique identifier
 +   *
 +   * @param dictionaryColumnIdList dictionary
 +   * @param carbonTableIdentifier
 +   * @return
 +   */
 +  private static List<DictionaryColumnUniqueIdentifier> getDictionaryColumnUniqueIdentifierList(
 +      List<String> dictionaryColumnIdList, CarbonTableIdentifier carbonTableIdentifier)
 +      throws QueryExecutionException {
 +    CarbonTable carbonTable =
 +        CarbonMetadata.getInstance().getCarbonTable(carbonTableIdentifier.getTableUniqueName());
 +    List<DictionaryColumnUniqueIdentifier> dictionaryColumnUniqueIdentifiers =
 +        new ArrayList<>(dictionaryColumnIdList.size());
 +    for (String columnId : dictionaryColumnIdList) {
 +      CarbonDimension dimension = CarbonMetadata.getInstance()
 +          .getCarbonDimensionBasedOnColIdentifier(carbonTable, columnId);
 +      if (null == dimension) {
-         throw new QueryExecutionException(
-             "The column id " + columnId + " could not be resolved.");
++        throw new QueryExecutionException("The column id " + columnId + " could not be resolved.");
 +      }
 +      DictionaryColumnUniqueIdentifier dictionaryColumnUniqueIdentifier =
 +          new DictionaryColumnUniqueIdentifier(carbonTableIdentifier,
 +              dimension.getColumnIdentifier(), dimension.getDataType());
 +      dictionaryColumnUniqueIdentifiers.add(dictionaryColumnUniqueIdentifier);
 +    }
 +    return dictionaryColumnUniqueIdentifiers;
 +  }
 +
 +  /**
 +   * Below method will be used to get the measure
 +   * block indexes to be read from the file
 +   *
 +   * @param queryMeasures              query measure
 +   * @param expressionMeasure          measure present in the expression
 +   * @param ordinalToBlockIndexMapping measure ordinal to block mapping
 +   * @return block indexes
 +   */
 +  public static int[] getMeasureBlockIndexes(List<QueryMeasure> queryMeasures,
 +      List<CarbonMeasure> expressionMeasure, Map<Integer, Integer> ordinalToBlockIndexMapping) {
 +    Set<Integer> measureBlockIndex = new HashSet<Integer>();
 +    for (int i = 0; i < queryMeasures.size(); i++) {
 +      measureBlockIndex
 +          .add(ordinalToBlockIndexMapping.get(queryMeasures.get(i).getMeasure().getOrdinal()));
 +    }
 +    for (int i = 0; i < expressionMeasure.size(); i++) {
 +      measureBlockIndex.add(ordinalToBlockIndexMapping.get(expressionMeasure.get(i).getOrdinal()));
 +    }
 +    return ArrayUtils.toPrimitive(measureBlockIndex.toArray(new Integer[measureBlockIndex.size()]));
 +  }
 +
 +  /**
 +   * Below method will be used to get the masked byte range for dimension
 +   * which is present in order by
 +   *
 +   * @param orderByDimensions order by dimension
 +   * @param generator         key generator
 +   * @param maskedRanges      masked byte range for dimension
 +   * @return range of masked byte for order by dimension
 +   */
 +  public static int[][] getMaskedByteRangeForSorting(List<QueryDimension> orderByDimensions,
 +      KeyGenerator generator, int[] maskedRanges) {
 +    int[][] dimensionCompareIndex = new int[orderByDimensions.size()][];
 +    int index = 0;
 +    for (int i = 0; i < dimensionCompareIndex.length; i++) {
 +      Set<Integer> integers = new TreeSet<Integer>();
 +      if (!orderByDimensions.get(i).getDimension().getEncoder().contains(Encoding.DICTIONARY)
 +          || orderByDimensions.get(i).getDimension().numberOfChild() > 0) {
 +        continue;
 +      }
 +      int[] range =
 +          generator.getKeyByteOffsets(orderByDimensions.get(i).getDimension().getKeyOrdinal());
 +      for (int j = range[0]; j <= range[1]; j++) {
 +        integers.add(j);
 +      }
 +      dimensionCompareIndex[index] = new int[integers.size()];
 +      int j = 0;
 +      for (Iterator<Integer> iterator = integers.iterator(); iterator.hasNext(); ) {
 +        Integer integer = (Integer) iterator.next();
 +        dimensionCompareIndex[index][j++] = integer.intValue();
 +      }
 +      index++;
 +    }
 +    for (int i = 0; i < dimensionCompareIndex.length; i++) {
 +      if (null == dimensionCompareIndex[i]) {
 +        continue;
 +      }
 +      int[] range = dimensionCompareIndex[i];
 +      if (null != range) {
 +        for (int j = 0; j < range.length; j++) {
 +          for (int k = 0; k < maskedRanges.length; k++) {
 +            if (range[j] == maskedRanges[k]) {
 +              range[j] = k;
 +              break;
 +            }
 +          }
 +        }
 +      }
 +
 +    }
 +    return dimensionCompareIndex;
 +  }
 +
 +  /**
 +   * Below method will be used to get the masked key for sorting
 +   *
 +   * @param orderDimensions           query dimension
 +   * @param generator                 key generator
 +   * @param maskedByteRangeForSorting masked byte range for sorting
 +   * @param maskedRanges              masked range
 +   * @return masked byte range
 +   * @throws QueryExecutionException
 +   */
 +  public static byte[][] getMaksedKeyForSorting(List<QueryDimension> orderDimensions,
 +      KeyGenerator generator, int[][] maskedByteRangeForSorting, int[] maskedRanges)
 +      throws QueryExecutionException {
 +    byte[][] maskedKey = new byte[orderDimensions.size()][];
 +    byte[] mdKey = null;
 +    long[] key = null;
 +    byte[] maskedMdKey = null;
 +    try {
 +      if (null != maskedByteRangeForSorting) {
 +        for (int i = 0; i < maskedByteRangeForSorting.length; i++) {
 +          if (null == maskedByteRangeForSorting[i]) {
 +            continue;
 +          }
 +          key = new long[generator.getDimCount()];
 +          maskedKey[i] = new byte[maskedByteRangeForSorting[i].length];
 +          key[orderDimensions.get(i).getDimension().getKeyOrdinal()] = Long.MAX_VALUE;
 +          mdKey = generator.generateKey(key);
 +          maskedMdKey = new byte[maskedRanges.length];
 +          for (int k = 0; k < maskedMdKey.length; k++) { // CHECKSTYLE:OFF
 +            // Approval
 +            // No:Approval-V1R2C10_001
 +            maskedMdKey[k] = mdKey[maskedRanges[k]];
 +          }
 +          for (int j = 0; j < maskedByteRangeForSorting[i].length; j++) {
 +            maskedKey[i][j] = maskedMdKey[maskedByteRangeForSorting[i][j]];
 +          }// CHECKSTYLE:ON
 +
 +        }
 +      }
 +    } catch (KeyGenException e) {
 +      throw new QueryExecutionException(e);
 +    }
 +    return maskedKey;
 +  }
 +
 +  /**
 +   * Below method will be used to get the mapping of whether a dimension is
 +   * present in order by or not
 +   *
 +   * @param sortedDimensions sort dimension present in order by query
 +   * @param queryDimensions  query dimension
 +   * @return sort dimension indexes
 +   */
 +  public static byte[] getSortDimensionIndexes(List<QueryDimension> sortedDimensions,
 +      List<QueryDimension> queryDimensions) {
 +    byte[] sortedDims = new byte[queryDimensions.size()];
 +    int indexOf = 0;
 +    for (int i = 0; i < sortedDims.length; i++) {
 +      indexOf = sortedDimensions.indexOf(queryDimensions.get(i));
 +      if (indexOf > -1) {
 +        sortedDims[i] = 1;
 +      }
 +    }
 +    return sortedDims;
 +  }
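
A small standalone sketch of the flag array this method produces; plain strings stand in for QueryDimension objects, and the column names are hypothetical.

  import java.util.Arrays;
  import java.util.List;

  public class SortDimensionIndexesExample {
    public static void main(String[] args) {
      // Strings stand in for QueryDimension objects; indexOf drives the flag
      // exactly as in getSortDimensionIndexes above.
      List<String> queryDimensions = Arrays.asList("country", "name", "salary");
      List<String> sortedDimensions = Arrays.asList("name");
      byte[] sortedDims = new byte[queryDimensions.size()];
      for (int i = 0; i < sortedDims.length; i++) {
        if (sortedDimensions.indexOf(queryDimensions.get(i)) > -1) {
          sortedDims[i] = 1;
        }
      }
      // prints [0, 1, 0]
      System.out.println(Arrays.toString(sortedDims));
    }
  }
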
 +
 +  /**
 +   * Below method will be used to get the mapping of block index and its
 +   * restructuring info
 +   *
 +   * @param queryDimensions   query dimension from query model
 +   * @param segmentProperties segment properties
 +   * @return map of block index to its restructuring info
 +   * @throws KeyGenException if problem while key generation
 +   */
 +  public static Map<Integer, KeyStructureInfo> getColumnGroupKeyStructureInfo(
 +      List<QueryDimension> queryDimensions, SegmentProperties segmentProperties)
 +      throws KeyGenException {
 +    Map<Integer, KeyStructureInfo> rowGroupToItsRSInfo = new HashMap<Integer, KeyStructureInfo>();
 +    // get column group id and its ordinal mapping of column group
 +    Map<Integer, List<Integer>> columnGroupAndItsOrdinalMappingForQuery =
 +        getColumnGroupAndItsOrdinalMapping(queryDimensions);
 +    Map<Integer, KeyGenerator> columnGroupAndItsKeygenartor =
 +        segmentProperties.getColumnGroupAndItsKeygenartor();
 +
 +    Iterator<Entry<Integer, List<Integer>>> iterator =
 +        columnGroupAndItsOrdinalMappingForQuery.entrySet().iterator();
 +    KeyStructureInfo restructureInfos = null;
 +    while (iterator.hasNext()) {
 +      Entry<Integer, List<Integer>> next = iterator.next();
 +      KeyGenerator keyGenerator = columnGroupAndItsKeygenartor.get(next.getKey());
 +      restructureInfos = new KeyStructureInfo();
 +      // sort the ordinal
 +      List<Integer> ordinal = next.getValue();
 +      List<Integer> mdKeyOrdinal = new ArrayList<Integer>();
 +      for (Integer ord : ordinal) {
 +        mdKeyOrdinal.add(segmentProperties.getColumnGroupMdKeyOrdinal(next.getKey(), ord));
 +      }
 +      Collections.sort(mdKeyOrdinal);
 +      // get the masked byte range for column group
 +      int[] maskByteRanges = getMaskedByteRangeBasedOrdinal(mdKeyOrdinal, keyGenerator);
 +      // max key for column group
 +      byte[] maxKey = getMaxKeyBasedOnOrinal(mdKeyOrdinal, keyGenerator);
 +      // get masked key for column group
 +      int[] maksedByte = getMaskedByte(keyGenerator.getKeySizeInBytes(), maskByteRanges);
 +      restructureInfos.setKeyGenerator(keyGenerator);
 +      restructureInfos.setMaskByteRanges(maskByteRanges);
 +      restructureInfos.setMaxKey(maxKey);
 +      restructureInfos.setMaskedBytes(maksedByte);
 +      rowGroupToItsRSInfo
 +          .put(segmentProperties.getDimensionOrdinalToBlockMapping().get(ordinal.get(0)),
 +              restructureInfos);
 +    }
 +    return rowGroupToItsRSInfo;
 +  }
 +
 +  /**
 +   * return true if given key is found in array
 +   *
 +   * @param data
 +   * @param key
 +   * @return
 +   */
 +  public static boolean searchInArray(int[] data, int key) {
 +    for (int i = 0; i < data.length; i++) {
 +      if (key == data[i]) {
 +        return true;
 +      }
 +    }
 +    return false;
 +  }
 +
 +  /**
 +   * Below method will be used to create a mapping of column group columns.
 +   * This mapping will have the column group id to all the dimension ordinals
 +   * present in the column group. This mapping will be used during query
 +   * execution to create a masked key for the column group dimensions, which
 +   * will be used in aggregation and filter queries as column group dimensions
 +   * are stored at the bit level
 +   */
 +  private static Map<Integer, List<Integer>> getColumnGroupAndItsOrdinalMapping(
 +      List<QueryDimension> origdimensions) {
 +
 +    List<QueryDimension> dimensions = new ArrayList<QueryDimension>(origdimensions.size());
 +    dimensions.addAll(origdimensions);
 +    /**
 +     * sort based on column group id
 +     */
 +    Collections.sort(dimensions, new Comparator<QueryDimension>() {
 +
 +      @Override public int compare(QueryDimension o1, QueryDimension o2) {
 +        return Integer
 +            .compare(o1.getDimension().columnGroupId(), o2.getDimension().columnGroupId());
 +      }
 +    });
 +    // list of row groups; this will store all the row group columns
 +    Map<Integer, List<Integer>> columnGroupAndItsOrdinalsMapping =
 +        new HashMap<Integer, List<Integer>>();
 +    // to store a column group
 +    List<Integer> currentColumnGroup = null;
 +    // current index
 +    int index = 0;
 +    // previous column group id, to check whether all the columns of a row
 +    // group have been selected
 +    int prvColumnGroupId = -1;
 +    while (index < dimensions.size()) {
 +      // if the dimension's column group id is the same as the previous
 +      // column group id and the dimension is not columnar, then we need to
 +      // add the ordinal of that column as it belongs to the same column group
 +      if (!dimensions.get(index).getDimension().isColumnar()
 +          && dimensions.get(index).getDimension().columnGroupId() == prvColumnGroupId
 +          && null != currentColumnGroup) {
 +        currentColumnGroup.add(dimensions.get(index).getDimension().getOrdinal());
 +      }
 +
 +      // if dimension is not a columnar then it is column group column
 +      else if (!dimensions.get(index).getDimension().isColumnar()) {
 +        currentColumnGroup = new ArrayList<Integer>();
 +        columnGroupAndItsOrdinalsMapping
 +            .put(dimensions.get(index).getDimension().columnGroupId(), currentColumnGroup);
 +        currentColumnGroup.add(dimensions.get(index).getDimension().getOrdinal());
 +      }
 +      // update the previous column group id every time; this is required to
 +      // group the columns of the same row group
 +      prvColumnGroupId = dimensions.get(index).getDimension().columnGroupId();
 +      index++;
 +    }
 +    return columnGroupAndItsOrdinalsMapping;
 +  }
 +
 +  /**
 +   * Below method will be used to get masked byte
 +   *
 +   * @param data           actual data
 +   * @param maxKey         max key
 +   * @param maskByteRanges mask byte range
 +   * @param byteCount
 +   * @return masked byte
 +   */
 +  public static byte[] getMaskedKey(byte[] data, byte[] maxKey, int[] maskByteRanges,
 +      int byteCount) {
 +    byte[] maskedKey = new byte[byteCount];
 +    int counter = 0;
 +    int byteRange = 0;
 +    for (int i = 0; i < byteCount; i++) {
 +      byteRange = maskByteRanges[i];
 +      if (byteRange != -1) {
 +        maskedKey[counter++] = (byte) (data[byteRange] & maxKey[byteRange]);
 +      }
 +    }
 +    return maskedKey;
 +  }
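
The sketch below reproduces the same masking logic on hypothetical byte values so the effect is easy to see: with mask byte ranges {2, 3, 6} and an all-0xFF max key, only those three bytes of the mdkey survive.

  import java.util.Arrays;

  public class GetMaskedKeyExample {

    // Same logic as QueryUtil.getMaskedKey, reproduced for a standalone run.
    static byte[] getMaskedKey(byte[] data, byte[] maxKey, int[] maskByteRanges, int byteCount) {
      byte[] maskedKey = new byte[byteCount];
      int counter = 0;
      for (int i = 0; i < byteCount; i++) {
        int byteRange = maskByteRanges[i];
        if (byteRange != -1) {
          maskedKey[counter++] = (byte) (data[byteRange] & maxKey[byteRange]);
        }
      }
      return maskedKey;
    }

    public static void main(String[] args) {
      // Hypothetical 8-byte mdkey, an all-0xFF max key and mask byte ranges
      // selecting bytes 2, 3 and 6.
      byte[] data = {1, 2, 3, 4, 5, 6, 7, 8};
      byte[] maxKey = new byte[8];
      Arrays.fill(maxKey, (byte) 0xFF);      // keep the selected bytes unchanged
      int[] maskByteRanges = {2, 3, 6};
      byte[] masked = getMaskedKey(data, maxKey, maskByteRanges, maskByteRanges.length);
      // prints [3, 4, 7]
      System.out.println(Arrays.toString(masked));
    }
  }
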
 +
 +  /**
 +   * Below method will be used to fill the block indexes of the query
 +   * dimensions which will be used in creating an output row. Here we are
 +   * passing two lists which store the indexes, one for dictionary columns and
 +   * the other for no dictionary columns. This is done so that in one
 +   * iteration we are able to fill both types of dimension block indexes
 +   *
 +   * @param queryDimensions                  dimension present in the query
 +   * @param columnOrdinalToBlockIndexMapping column ordinal to block index mapping
 +   * @param dictionaryDimensionBlockIndex    list to store dictionary column block indexes
 +   * @param noDictionaryDimensionBlockIndex  list to store no dictionary block indexes
 +   */
 +  public static void fillQueryDimensionsBlockIndexes(List<QueryDimension> queryDimensions,
 +      Map<Integer, Integer> columnOrdinalToBlockIndexMapping,
 +      Set<Integer> dictionaryDimensionBlockIndex, List<Integer> noDictionaryDimensionBlockIndex) {
 +    for (QueryDimension queryDimension : queryDimensions) {
-       if (CarbonUtil.hasEncoding(queryDimension.getDimension().getEncoder(), Encoding.DICTIONARY)) {
++      if (CarbonUtil.hasEncoding(queryDimension.getDimension().getEncoder(), Encoding.DICTIONARY)
++          && queryDimension.getDimension().numberOfChild() == 0) {
 +        dictionaryDimensionBlockIndex
 +            .add(columnOrdinalToBlockIndexMapping.get(queryDimension.getDimension().getOrdinal()));
-       } else {
++      } else if(queryDimension.getDimension().numberOfChild() == 0){
 +        noDictionaryDimensionBlockIndex
 +            .add(columnOrdinalToBlockIndexMapping.get(queryDimension.getDimension().getOrdinal()));
 +      }
 +    }
 +  }
 +
 +  /**
 +   * Below method will be used to resolve the query model.
 +   * Resolving means setting the actual dimension and measure objects,
 +   * as from the driver only column names are passed, to avoid heavy object
 +   * serialization
 +   *
 +   * @param queryModel query model
 +   */
 +  public static void resolveQueryModel(QueryModel queryModel) {
 +    CarbonMetadata.getInstance().addCarbonTable(queryModel.getTable());
 +    // TODO need to load the table from table identifier
 +    CarbonTable carbonTable = queryModel.getTable();
 +    String tableName =
 +        queryModel.getAbsoluteTableIdentifier().getCarbonTableIdentifier().getTableName();
 +    // resolve query dimension
 +    for (QueryDimension queryDimension : queryModel.getQueryDimension()) {
 +      queryDimension
 +          .setDimension(carbonTable.getDimensionByName(tableName, queryDimension.getColumnName()));
 +    }
 +    // resolve sort dimension
 +    for (QueryDimension sortDimension : queryModel.getSortDimension()) {
 +      sortDimension
 +          .setDimension(carbonTable.getDimensionByName(tableName, sortDimension.getColumnName()));
 +    }
 +    // resolve query measure
 +    for (QueryMeasure queryMeasure : queryModel.getQueryMeasures()) {
 +      // in case of count(*) the column name will be "count(*)", so first
 +      // check whether any measure is present. If a measure is present and
 +      // the first measure is not the default invisible measure, then add
 +      // that measure; otherwise add the first dimension as a measure.
 +      // currently a default measure is always added when no measure is
 +      // present, so the first condition will never be false, but the check
 +      // is kept in case that behaviour is removed in the future
 +      if (queryMeasure.getColumnName().equals("count(*)")) {
 +        if (carbonTable.getMeasureByTableName(tableName).size() > 0 && !carbonTable
 +            .getMeasureByTableName(tableName).get(0).getColName()
 +            .equals(CarbonCommonConstants.DEFAULT_INVISIBLE_DUMMY_MEASURE)) {
 +          queryMeasure.setMeasure(carbonTable.getMeasureByTableName(tableName).get(0));
 +        } else {
 +          CarbonMeasure dummyMeasure = new CarbonMeasure(
 +              carbonTable.getDimensionByTableName(tableName).get(0).getColumnSchema(), 0);
 +          queryMeasure.setMeasure(dummyMeasure);
 +        }
 +      } else {
 +        queryMeasure
 +            .setMeasure(carbonTable.getMeasureByName(tableName, queryMeasure.getColumnName()));
 +      }
 +    }
 +  }
 +
 +  /**
 +   * Below method will be used to get the index of number type aggregator
 +   *
 +   * @param aggType
 +   * @return index in aggregator
 +   */
 +  public static int[] getNumberTypeIndex(List<String> aggType) {
 +    List<Integer> indexList = new ArrayList<Integer>();
 +    for (int i = 0; i < aggType.size(); i++) {
 +      if (CarbonCommonConstants.SUM.equals(aggType.get(i)) || CarbonCommonConstants.AVERAGE
 +          .equals(aggType.get(i))) {
 +        indexList.add(i);
 +      }
 +    }
 +    return ArrayUtils.toPrimitive(indexList.toArray(new Integer[indexList.size()]));
 +  }
 +
 +  /**
 +   * below method will be used to get the actual type aggregator
 +   *
 +   * @param aggType
 +   * @return index in aggregator
 +   */
 +  public static int[] getActualTypeIndex(List<String> aggType) {
 +    List<Integer> indexList = new ArrayList<Integer>();
 +    for (int i = 0; i < aggType.size(); i++) {
 +      if (!CarbonCommonConstants.SUM.equals(aggType.get(i)) && !CarbonCommonConstants.AVERAGE
 +          .equals(aggType.get(i))) {
 +        indexList.add(i);
 +      }
 +    }
 +    return ArrayUtils.toPrimitive(indexList.toArray(new Integer[indexList.size()]));
 +  }
 +
 +  /**
-    * It is required for extracting column data from columngroup chunk
++   * Below method will be used to get the key structure for the column group
 +   *
-    * @return
++   * @param segmentProperties      segment properties
++   * @param dimColumnEvaluatorInfo dimension evaluator info
++   * @return key structure info for column group dimension
 +   * @throws KeyGenException
 +   */
 +  public static KeyStructureInfo getKeyStructureInfo(SegmentProperties segmentProperties,
 +      DimColumnResolvedFilterInfo dimColumnEvaluatorInfo) throws KeyGenException {
 +    int colGrpId = getColumnGroupId(segmentProperties, dimColumnEvaluatorInfo.getColumnIndex());
 +    KeyGenerator keyGenerator = segmentProperties.getColumnGroupAndItsKeygenartor().get(colGrpId);
 +    List<Integer> mdKeyOrdinal = new ArrayList<Integer>();
 +
 +    mdKeyOrdinal.add(segmentProperties
 +        .getColumnGroupMdKeyOrdinal(colGrpId, dimColumnEvaluatorInfo.getColumnIndex()));
 +    int[] maskByteRanges = QueryUtil.getMaskedByteRangeBasedOrdinal(mdKeyOrdinal, keyGenerator);
 +    byte[] maxKey = QueryUtil.getMaxKeyBasedOnOrinal(mdKeyOrdinal, keyGenerator);
 +    int[] maksedByte = QueryUtil.getMaskedByte(keyGenerator.getKeySizeInBytes(), maskByteRanges);
 +    KeyStructureInfo restructureInfos = new KeyStructureInfo();
 +    restructureInfos.setKeyGenerator(keyGenerator);
 +    restructureInfos.setMaskByteRanges(maskByteRanges);
 +    restructureInfos.setMaxKey(maxKey);
 +    restructureInfos.setMaskedBytes(maksedByte);
 +    return restructureInfos;
 +  }
 +
++  /**
++   * Below method will be used to get the column group id based on the ordinal
++   *
++   * @param segmentProperties segment properties
++   * @param ordinal           ordinal to be searched
++   * @return column group id
++   */
 +  public static int getColumnGroupId(SegmentProperties segmentProperties, int ordinal) {
 +    int[][] columnGroups = segmentProperties.getColumnGroups();
 +    int colGrpId = -1;
 +    for (int i = 0; i < columnGroups.length; i++) {
 +      if (columnGroups[i].length > 1) {
 +        colGrpId++;
 +        if (QueryUtil.searchInArray(columnGroups[i], ordinal)) {
 +          break;
 +        }
 +      }
 +    }
 +    return colGrpId;
 +  }
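
A standalone sketch of this search over a hypothetical column group layout: only groups with more than one column get an id, counted in their order of appearance.

  public class ColumnGroupIdExample {

    // Same search as QueryUtil.getColumnGroupId, over a hypothetical layout.
    static int getColumnGroupId(int[][] columnGroups, int ordinal) {
      int colGrpId = -1;
      for (int i = 0; i < columnGroups.length; i++) {
        if (columnGroups[i].length > 1) {     // only multi-column groups get an id
          colGrpId++;
          if (contains(columnGroups[i], ordinal)) {
            break;
          }
        }
      }
      return colGrpId;
    }

    static boolean contains(int[] data, int key) {
      for (int value : data) {
        if (value == key) {
          return true;
        }
      }
      return false;
    }

    public static void main(String[] args) {
      // column 0 is columnar, {1,2} form group 0, column 3 is columnar,
      // {4,5,6} form group 1
      int[][] columnGroups = { {0}, {1, 2}, {3}, {4, 5, 6} };
      System.out.println(getColumnGroupId(columnGroups, 2));   // prints 0
      System.out.println(getColumnGroupId(columnGroups, 5));   // prints 1
    }
  }
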
++
++  /**
++   * Below method will be used to get the map of complex dimensions and their query types,
++   * which will be used during query execution
++   *
++   * @param queryDimensions          complex dimension in query
++   * @param dimensionToBlockIndexMap dimension to block index in file map
++   * @return complex dimension and query type
++   */
++  public static Map<Integer, GenericQueryType> getComplexDimensionsMap(
++      List<QueryDimension> queryDimensions, Map<Integer, Integer> dimensionToBlockIndexMap,
++      int[] eachComplexColumnValueSize, Map<String, Dictionary> columnIdToDictionaryMap,
++      Set<CarbonDimension> filterDimensions) {
++    Map<Integer, GenericQueryType> complexTypeMap = new HashMap<Integer, GenericQueryType>();
++    for (QueryDimension dimension : queryDimensions) {
++      CarbonDimension actualDimension = dimension.getDimension();
++      if (actualDimension.getNumberOfChild() == 0) {
++        continue;
++      }
++      fillParentDetails(dimensionToBlockIndexMap, actualDimension, complexTypeMap,
++          eachComplexColumnValueSize, columnIdToDictionaryMap);
++    }
++    if (null != filterDimensions) {
++      for (CarbonDimension filterDimension : filterDimensions) {
++        fillParentDetails(dimensionToBlockIndexMap, filterDimension, complexTypeMap,
++            eachComplexColumnValueSize, columnIdToDictionaryMap);
++      }
++    }
++    return complexTypeMap;
++  }
++
++  private static GenericQueryType fillParentDetails(Map<Integer, Integer> dimensionToBlockIndexMap,
++      CarbonDimension dimension, Map<Integer, GenericQueryType> complexTypeMap,
++      int[] eachComplexColumnValueSize, Map<String, Dictionary> columnIdToDictionaryMap) {
++    int parentBlockIndex = dimensionToBlockIndexMap.get(dimension.getOrdinal());
++    GenericQueryType parentQueryType = dimension.getDataType().equals(DataType.ARRAY) ?
++        new ArrayQueryType(dimension.getColName(), dimension.getColName(), parentBlockIndex) :
++        new StructQueryType(dimension.getColName(), dimension.getColName(),
++            dimensionToBlockIndexMap.get(dimension.getOrdinal()));
++    complexTypeMap.put(dimension.getOrdinal(), parentQueryType);
++    parentBlockIndex =
++        fillChildrenDetails(eachComplexColumnValueSize, columnIdToDictionaryMap, parentBlockIndex,
++            dimension, parentQueryType);
++    return parentQueryType;
++  }
++
++  private static int fillChildrenDetails(int[] eachComplexColumnValueSize,
++      Map<String, Dictionary> columnIdToDictionaryMap, int parentBlockIndex,
++      CarbonDimension dimension, GenericQueryType parentQueryType) {
++    for (int i = 0; i < dimension.getNumberOfChild(); i++) {
++      switch (dimension.getListOfChildDimensions().get(i).getDataType()) {
++        case ARRAY:
++          parentQueryType.addChildren(
++              new ArrayQueryType(dimension.getListOfChildDimensions().get(i).getColName(),
++                  dimension.getColName(), ++parentBlockIndex));
++          break;
++        case STRUCT:
++          parentQueryType.addChildren(
++              new StructQueryType(dimension.getListOfChildDimensions().get(i).getColName(),
++                  dimension.getColName(), ++parentBlockIndex));
++          break;
++        default:
++          boolean isDirectDictionary = CarbonUtil.hasEncoding(
++              dimension.getListOfChildDimensions().get(i).getEncoder(),
++              Encoding.DIRECT_DICTIONARY);
++          parentQueryType.addChildren(
++              new PrimitiveQueryType(dimension.getListOfChildDimensions().get(i).getColName(),
++                  dimension.getColName(), ++parentBlockIndex,
++                  dimension.getListOfChildDimensions().get(i).getDataType(),
++                  eachComplexColumnValueSize[dimension.getListOfChildDimensions().get(i)
++                      .getComplexTypeOrdinal()], columnIdToDictionaryMap
++                  .get(dimension.getListOfChildDimensions().get(i).getColumnId()),
++                  isDirectDictionary));
++      }
++      if (dimension.getListOfChildDimensions().get(i).getNumberOfChild() > 0) {
++        parentBlockIndex = fillChildrenDetails(eachComplexColumnValueSize, columnIdToDictionaryMap,
++            parentBlockIndex, dimension.getListOfChildDimensions().get(i), parentQueryType);
++      }
++    }
++    return parentBlockIndex;
++  }
++
++  public static Set<CarbonDimension> getAllFilterDimensions(FilterResolverIntf filterResolverTree) {
++    Set<CarbonDimension> filterDimensions = new HashSet<CarbonDimension>();
++    if (null == filterResolverTree) {
++      return filterDimensions;
++    }
++    List<ColumnExpression> dimensionResolvedInfos = new ArrayList<ColumnExpression>();
++    Expression filterExpression = filterResolverTree.getFilterExpression();
++    if (filterExpression instanceof BinaryLogicalExpression) {
++      BinaryLogicalExpression logicalExpression = (BinaryLogicalExpression) filterExpression;
++      dimensionResolvedInfos.addAll(logicalExpression.getColumnList());
++    }
++    for (ColumnExpression info : dimensionResolvedInfos) {
++      if (info.isDimension() && info.getDimension().getNumberOfChild() > 0) {
++        filterDimensions.add(info.getDimension());
++      }
++    }
++    return filterDimensions;
++
++  }
++
 +}
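For readers skimming this hunk, the column-group id lookup in getColumnGroupId above is easier to follow outside the Carbon classes. Below is a minimal standalone sketch of the same walk over an int[][] column layout; the class name and the sample layout are illustrative only, not part of CarbonData.

public class ColumnGroupIdSketch {

  // Mirrors getColumnGroupId: only groups holding more than one ordinal count as
  // column groups, and the returned id is the position among those groups only.
  static int columnGroupIdFor(int[][] columnGroups, int ordinal) {
    int colGrpId = -1;
    for (int[] group : columnGroups) {
      if (group.length > 1) {
        colGrpId++;
        if (contains(group, ordinal)) {
          break;
        }
      }
    }
    return colGrpId;
  }

  static boolean contains(int[] group, int ordinal) {
    for (int member : group) {
      if (member == ordinal) {
        return true;
      }
    }
    return false;
  }

  public static void main(String[] args) {
    // hypothetical layout: {1,2} and {5,6,7} are column groups, the rest are single columns
    int[][] layout = { {0}, {1, 2}, {3}, {4}, {5, 6, 7} };
    System.out.println(columnGroupIdFor(layout, 2)); // 0 -> first multi-column group
    System.out.println(columnGroupIdFor(layout, 6)); // 1 -> second multi-column group
  }
}

As in the original, an ordinal that belongs to no column group simply falls through the loop and returns the id of the last group seen, so callers are presumably expected to pass only ordinals that are known to sit inside a column group.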

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/executor/util/RestructureUtil.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/executor/util/RestructureUtil.java
index 92b469c,0000000..e1dafe1
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/executor/util/RestructureUtil.java
+++ b/core/src/main/java/org/carbondata/scan/executor/util/RestructureUtil.java
@@@ -1,128 -1,0 +1,135 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +package org.carbondata.scan.executor.util;
 +
 +import java.util.ArrayList;
 +import java.util.List;
 +
 +import org.carbondata.core.carbon.metadata.schema.table.column.CarbonDimension;
 +import org.carbondata.core.carbon.metadata.schema.table.column.CarbonMeasure;
 +import org.carbondata.core.constants.CarbonCommonConstants;
 +import org.carbondata.scan.executor.infos.AggregatorInfo;
 +import org.carbondata.scan.model.QueryDimension;
 +import org.carbondata.scan.model.QueryMeasure;
 +
 +/**
 + * Utility class for restructuring
 + */
 +public class RestructureUtil {
 +
 +  /**
 +   * Below method will be used to get the updated query dimensions. After
 +   * restructuring, some dimensions may not be present in older table blocks;
 +   * in that case we need to select only those query dimensions which are
 +   * present in the current table block
 +   *
 +   * @param queryDimensions
 +   * @param tableBlockDimensions
 +   * @return list of query dimension which is present in the table block
 +   */
-   public static List<QueryDimension> getUpdatedQueryDimension(
-       List<QueryDimension> queryDimensions, List<CarbonDimension> tableBlockDimensions) {
++  public static List<QueryDimension> getUpdatedQueryDimension(List<QueryDimension> queryDimensions,
++      List<CarbonDimension> tableBlockDimensions, List<CarbonDimension> tableComplexDimension) {
 +    List<QueryDimension> presentDimension =
 +        new ArrayList<QueryDimension>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
 +    // selecting only those query dimensions which are present in the table block
 +    for (QueryDimension queryDimimension : queryDimensions) {
 +      for (CarbonDimension tableDimension : tableBlockDimensions) {
 +        if (tableDimension.equals(queryDimimension.getDimension())) {
 +          presentDimension.add(queryDimimension);
 +        }
 +      }
 +    }
++    for (QueryDimension queryDimimension : queryDimensions) {
++      for (CarbonDimension tableDimension : tableComplexDimension) {
++        if (tableDimension.equals(queryDimimension.getDimension())) {
++          presentDimension.add(queryDimimension);
++        }
++      }
++    }
 +    return presentDimension;
 +  }
 +
 +  /**
 +   * Below method is to add dimension children for a complex type dimension.
 +   * Internally we create a dimension column for each child of a complex
 +   * dimension, so when a complex dimension is requested in the query we need
 +   * to add its children as well, since they are hidden from the user.
 +   * For example, if the complex dimension is an Array of String[2] we store
 +   * 3 dimensions, and when the user queries the complex (array) type we need
 +   * to add its children, read the respective blocks and create a tuple based
 +   * on all three dimensions
 +   *
 +   * @param queryDimensions      current query dimensions
 +   * @param tableBlockDimensions dimensions which is present in the table block
 +   * @return updated dimension(after adding complex type children)
 +   */
 +  public static List<CarbonDimension> addChildrenForComplexTypeDimension(
 +      List<CarbonDimension> queryDimensions, List<CarbonDimension> tableBlockDimensions) {
 +    List<CarbonDimension> updatedQueryDimension = new ArrayList<CarbonDimension>();
 +    int numberOfChildren = 0;
 +    for (CarbonDimension queryDimension : queryDimensions) {
 +      // if the number of children is zero, then it is not a complex dimension
 +      // so directly add it to the query dimensions
 +      if (queryDimension.numberOfChild() == 0) {
 +        updatedQueryDimension.add(queryDimension);
 +      }
 +      // if the number of children is more than zero then add all its children
 +      numberOfChildren = queryDimension.getOrdinal() + queryDimension.numberOfChild();
 +      for (int j = queryDimension.getOrdinal(); j < numberOfChildren; j++) {
 +        updatedQueryDimension.add(tableBlockDimensions.get(j));
 +      }
 +    }
 +    return updatedQueryDimension;
 +  }
 +
 +  /**
 +   * Below method will be used to get the aggregator info object
 +   * in this method some of the properties which will be extracted
 +   * from query measure and current block measures will be set
 +   *
 +   * @param queryMeasures        measures present in query
 +   * @param currentBlockMeasures current block measures
 +   * @return aggregator info
 +   */
 +  public static AggregatorInfo getAggregatorInfos(List<QueryMeasure> queryMeasures,
 +      List<CarbonMeasure> currentBlockMeasures) {
 +    AggregatorInfo aggregatorInfos = new AggregatorInfo();
 +    int numberOfMeasureInQuery = queryMeasures.size();
 +    int[] measureOrdinals = new int[numberOfMeasureInQuery];
 +    Object[] defaultValues = new Object[numberOfMeasureInQuery];
 +    boolean[] measureExistsInCurrentBlock = new boolean[numberOfMeasureInQuery];
 +    int index = 0;
 +    for (QueryMeasure queryMeasure : queryMeasures) {
 +      measureOrdinals[index] = queryMeasure.getMeasure().getOrdinal();
 +      // if the query measure exists in the current block measures
 +      // then set measure exists to true,
 +      // otherwise add the default value of the measure
 +      if (currentBlockMeasures.contains(queryMeasure.getMeasure())) {
 +        measureExistsInCurrentBlock[index] = true;
 +      } else {
 +        defaultValues[index] = queryMeasure.getMeasure().getDefaultValue();
 +      }
 +      index++;
 +    }
 +    aggregatorInfos.setDefaultValues(defaultValues);
 +    aggregatorInfos.setMeasureOrdinals(measureOrdinals);
 +    aggregatorInfos.setMeasureExists(measureExistsInCurrentBlock);
 +    return aggregatorInfos;
 +  }
 +}
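The restructure handling in getAggregatorInfos boils down to a membership check per query measure: measures present in the block are read normally, and the rest fall back to a default value. Here is a minimal self-contained sketch of that bookkeeping, with plain strings standing in for QueryMeasure/CarbonMeasure and a hypothetical default map, so it compiles without the Carbon classes.

import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class RestructureSketch {

  public static void main(String[] args) {
    // the query asks for three measures; this (older) block only stores two of them
    List<String> queryMeasures = Arrays.asList("sales", "quantity", "discount");
    List<String> blockMeasures = Arrays.asList("sales", "quantity");

    // hypothetical schema default for a measure added after this block was written
    Map<String, Object> schemaDefaults = new HashMap<String, Object>();
    schemaDefaults.put("discount", 0.0);

    boolean[] measureExists = new boolean[queryMeasures.size()];
    Object[] defaultValues = new Object[queryMeasures.size()];

    for (int i = 0; i < queryMeasures.size(); i++) {
      String measure = queryMeasures.get(i);
      if (blockMeasures.contains(measure)) {
        // present in this block: the value will be read from the measure chunk
        measureExists[i] = true;
      } else {
        // missing in this older block: fall back to the schema default
        defaultValues[i] = schemaDefaults.get(measure);
      }
    }

    System.out.println(Arrays.toString(measureExists)); // [true, true, false]
    System.out.println(Arrays.toString(defaultValues)); // [null, null, 0.0]
  }
}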

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/expression/ExpressionResult.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/expression/ExpressionResult.java
index c90ed06,0000000..0ad39f6
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/expression/ExpressionResult.java
+++ b/core/src/main/java/org/carbondata/scan/expression/ExpressionResult.java
@@@ -1,468 -1,0 +1,472 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.carbondata.scan.expression;
 +
 +import java.math.BigDecimal;
 +import java.sql.Timestamp;
 +import java.text.ParseException;
 +import java.text.SimpleDateFormat;
 +import java.util.ArrayList;
 +import java.util.Date;
 +import java.util.List;
 +
 +import org.carbondata.core.carbon.metadata.datatype.DataType;
 +import org.carbondata.core.constants.CarbonCommonConstants;
 +import org.carbondata.core.util.CarbonProperties;
 +import org.carbondata.scan.expression.exception.FilterIllegalMemberException;
 +
 +public class ExpressionResult implements Comparable<ExpressionResult> {
 +
 +  private static final long serialVersionUID = 1L;
 +  protected DataType dataType;
 +
 +  protected Object value;
 +
 +  private List<ExpressionResult> expressionResults;
 +
 +  public ExpressionResult(DataType dataType, Object value) {
 +    this.dataType = dataType;
 +    this.value = value;
 +  }
 +
 +  public ExpressionResult(List<ExpressionResult> expressionResults) {
 +    this.expressionResults = expressionResults;
 +  }
 +
 +  public void set(DataType dataType, Object value) {
 +    this.dataType = dataType;
 +    this.value = value;
 +    this.expressionResults = null;
 +  }
 +
 +  public DataType getDataType() {
 +    return dataType;
 +  }
 +
 +  //CHECKSTYLE:OFF Approval No:Approval-V1R2C10_009
 +  public Integer getInt() throws FilterIllegalMemberException {
 +    if (value == null) {
 +      return null;
 +    }
 +    try {
 +      switch (this.getDataType()) {
 +        case STRING:
 +          try {
 +            return Integer.parseInt(value.toString());
 +          } catch (NumberFormatException e) {
 +            throw new FilterIllegalMemberException(e);
 +          }
 +        case SHORT:
 +          return ((Short) value).intValue();
 +        case INT:
 +        case DOUBLE:
 +          if (value instanceof Double) {
 +            return ((Double) value).intValue();
 +          }
 +          return (Integer) value;
 +        case TIMESTAMP:
 +          if (value instanceof Timestamp) {
 +            return (int) (((Timestamp) value).getTime() % 1000);
 +          } else {
 +            return (Integer) value;
 +          }
 +        default:
 +          throw new FilterIllegalMemberException(
 +              "Cannot convert" + this.getDataType().name() + " to integer type value");
 +      }
 +
 +    } catch (ClassCastException e) {
 +      throw new FilterIllegalMemberException(
 +          "Cannot convert" + this.getDataType().name() + " to Integer type value");
 +    }
 +  }
 +
 +  public Short getShort() throws FilterIllegalMemberException {
 +    if (value == null) {
 +      return null;
 +    }
 +    try {
 +      switch (this.getDataType()) {
 +        case STRING:
 +          try {
 +            return Short.parseShort(value.toString());
 +          } catch (NumberFormatException e) {
 +            throw new FilterIllegalMemberException(e);
 +          }
 +        case SHORT:
 +        case INT:
 +        case DOUBLE:
 +
 +          if (value instanceof Double) {
 +            return ((Double) value).shortValue();
 +          } else if (value instanceof Integer) {
 +            return ((Integer) value).shortValue();
 +          }
 +          return (Short) value;
 +
 +        case TIMESTAMP:
 +
 +          if (value instanceof Timestamp) {
 +            return (short) (((Timestamp) value).getTime() % 1000);
 +          } else {
 +            return (Short) value;
 +          }
 +
 +        default:
 +          throw new FilterIllegalMemberException(
 +              "Cannot convert" + this.getDataType().name() + " to integer type value");
 +      }
 +
 +    } catch (ClassCastException e) {
 +      throw new FilterIllegalMemberException(
 +          "Cannot convert" + this.getDataType().name() + " to Integer type value");
 +    }
 +  }
 +
 +  public String getString() throws FilterIllegalMemberException {
 +    if (value == null) {
 +      return null;
 +    }
 +    try {
 +      switch (this.getDataType()) {
 +        case TIMESTAMP:
 +          SimpleDateFormat parser = new SimpleDateFormat(CarbonProperties.getInstance()
 +              .getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
 +                  CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT));
 +          if (value instanceof Timestamp) {
 +            return parser.format((Timestamp) value);
 +          } else {
 +            return parser.format(new Timestamp((long) value / 1000));
 +          }
 +
 +        default:
 +          return value.toString();
 +      }
 +    } catch (Exception e) {
 +      throw new FilterIllegalMemberException(
 +          "Cannot convert" + this.getDataType().name() + " to String type value");
 +    }
 +  }
 +
 +  public Double getDouble() throws FilterIllegalMemberException {
 +    if (value == null) {
 +      return null;
 +    }
 +    try {
 +      switch (this.getDataType()) {
 +        case STRING:
 +          try {
 +            return Double.parseDouble(value.toString());
 +          } catch (NumberFormatException e) {
 +            throw new FilterIllegalMemberException(e);
 +          }
 +        case SHORT:
 +          return ((Short) value).doubleValue();
 +        case INT:
 +          return ((Integer) value).doubleValue();
 +        case LONG:
 +          return ((Long) value).doubleValue();
 +        case DOUBLE:
 +          return (Double) value;
 +        case TIMESTAMP:
 +          if (value instanceof Timestamp) {
 +            return (double) ((Timestamp) value).getTime() * 1000;
 +          } else {
 +            return (Double) (value);
 +          }
 +        default:
 +          throw new FilterIllegalMemberException(
 +              "Cannot convert" + this.getDataType().name() + " to double type value");
 +      }
 +    } catch (ClassCastException e) {
 +      throw new FilterIllegalMemberException(
 +          "Cannot convert" + this.getDataType().name() + " to Double type value");
 +    }
 +  }
 +  //CHECKSTYLE:ON
 +
 +  public Long getLong() throws FilterIllegalMemberException {
 +    if (value == null) {
 +      return null;
 +    }
 +    try {
 +      switch (this.getDataType()) {
 +        case STRING:
 +          try {
 +            return Long.parseLong(value.toString());
 +          } catch (NumberFormatException e) {
 +            throw new FilterIllegalMemberException(e);
 +          }
 +        case SHORT:
 +          return ((Short) value).longValue();
 +        case INT:
 +          return (Long) value;
 +        case LONG:
 +          return (Long) value;
 +        case DOUBLE:
 +          return (Long) value;
 +        case TIMESTAMP:
 +          if (value instanceof Timestamp) {
 +            return 1000 * ((Timestamp) value).getTime();
 +          } else {
 +            return (Long) value;
 +          }
 +        default:
 +          throw new FilterIllegalMemberException(
 +              "Cannot convert" + this.getDataType().name() + " to Long type value");
 +      }
 +    } catch (ClassCastException e) {
 +      throw new FilterIllegalMemberException(
 +          "Cannot convert" + this.getDataType().name() + " to Long type value");
 +    }
 +
 +  }
 +
 +  // added to handle BigDecimal (DECIMAL data type) values
 +  public BigDecimal getDecimal() throws FilterIllegalMemberException {
 +    if (value == null) {
 +      return null;
 +    }
 +    try {
 +      switch (this.getDataType()) {
 +        case STRING:
 +          try {
 +            return new BigDecimal(value.toString());
 +          } catch (NumberFormatException e) {
 +            throw new FilterIllegalMemberException(e);
 +          }
 +        case SHORT:
 +          return new BigDecimal((short) value);
 +        case INT:
 +          return new BigDecimal((int) value);
 +        case LONG:
 +          return new BigDecimal((long) value);
 +        case DOUBLE:
-           return new BigDecimal((double) value);
++          return new BigDecimal(value.toString());
 +        case DECIMAL:
 +          return new BigDecimal(value.toString());
 +        case TIMESTAMP:
 +          if (value instanceof Timestamp) {
 +            return new BigDecimal(1000 * ((Timestamp) value).getTime());
 +          } else {
 +            return new BigDecimal((long) value);
 +          }
 +        default:
 +          throw new FilterIllegalMemberException(
 +              "Cannot convert" + this.getDataType().name() + " to Long type value");
 +      }
 +    } catch (ClassCastException e) {
 +      throw new FilterIllegalMemberException(
 +          "Cannot convert" + this.getDataType().name() + " to Long type value");
 +    }
 +
 +  }
 +
 +  public Long getTime() throws FilterIllegalMemberException {
 +    if (value == null) {
 +      return null;
 +    }
 +    try {
 +      switch (this.getDataType()) {
 +        case STRING:
 +          // Currently the query engine layer only supports the yyyy-MM-dd HH:mm:ss date format,
 +          // no matter in which format the data has been stored. So while retrieving the direct
 +          // surrogate value for a filter member, it should first be converted to a date as per
 +          // the above format and then the timestamp should be retrieved.
 +          SimpleDateFormat parser =
 +              new SimpleDateFormat(CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
 +          Date dateToStr;
 +          try {
 +            dateToStr = parser.parse(value.toString());
 +            return dateToStr.getTime() * 1000;
 +          } catch (ParseException e) {
 +            throw new FilterIllegalMemberException(
 +                "Cannot convert" + this.getDataType().name() + " to Time/Long type value");
 +          }
 +        case SHORT:
 +          return ((Short) value).longValue();
 +        case INT:
 +        case LONG:
 +          return (Long) value;
 +        case DOUBLE:
 +          return (Long) value;
 +        case TIMESTAMP:
 +          if (value instanceof Timestamp) {
 +            return ((Timestamp) value).getTime() * 1000;
 +          } else {
 +            return (Long) value;
 +          }
 +        default:
 +          throw new FilterIllegalMemberException(
 +              "Cannot convert" + this.getDataType().name() + " to Time/Long type value");
 +      }
 +    } catch (ClassCastException e) {
 +      throw new FilterIllegalMemberException(
 +          "Cannot convert" + this.getDataType().name() + " to Time/Long type value");
 +    }
 +
 +  }
 +
 +  public Boolean getBoolean() throws FilterIllegalMemberException {
 +    if (value == null) {
 +      return null;
 +    }
 +    try {
 +      switch (this.getDataType()) {
 +        case STRING:
 +          try {
 +            return Boolean.parseBoolean(value.toString());
 +          } catch (NumberFormatException e) {
 +            throw new FilterIllegalMemberException(e);
 +          }
 +
 +        case BOOLEAN:
 +          return Boolean.parseBoolean(value.toString());
 +
 +        default:
 +          throw new FilterIllegalMemberException(
 +              "Cannot convert" + this.getDataType().name() + " to boolean type value");
 +      }
 +    } catch (ClassCastException e) {
 +      throw new FilterIllegalMemberException(
 +          "Cannot convert" + this.getDataType().name() + " to Boolean type value");
 +    }
 +  }
 +
 +  public List<ExpressionResult> getList() {
 +    if (null == expressionResults) {
 +      List<ExpressionResult> a = new ArrayList<ExpressionResult>(20);
 +      a.add(new ExpressionResult(dataType, value));
 +      return a;
 +    } else {
 +      return expressionResults;
 +    }
 +  }
 +
 +  public List<String> getListAsString() throws FilterIllegalMemberException {
 +    List<String> evaluateResultListFinal = new ArrayList<String>(20);
 +    List<ExpressionResult> evaluateResultList = getList();
 +    for (ExpressionResult result : evaluateResultList) {
 +      if (result.getString() == null) {
 +        evaluateResultListFinal.add(CarbonCommonConstants.MEMBER_DEFAULT_VAL);
 +        continue;
 +      }
 +      evaluateResultListFinal.add(result.getString());
 +    }
 +    return evaluateResultListFinal;
 +  }
 +
 +  @Override public int hashCode() {
 +    final int prime = 31;
 +    int result = 1;
 +    if (null != expressionResults) {
 +      result = prime * result + expressionResults.hashCode();
 +    } else if (null != value) {
 +      result = prime * result + value.toString().hashCode();
 +    } else {
 +      result = prime * result + "".hashCode();
 +    }
 +
 +    return result;
 +  }
 +
 +  @Override public boolean equals(Object obj) {
 +    if (!(obj instanceof ExpressionResult)) {
 +      return false;
 +    }
 +    if (this == obj) {
 +      return true;
 +    }
 +    if (getClass() != obj.getClass()) {
 +      return false;
 +    }
 +    ExpressionResult objToCompare = (ExpressionResult) obj;
 +    boolean result = false;
 +    if (this.value == objToCompare.value) {
 +      return true;
 +    }
 +    try {
 +      switch (this.getDataType()) {
 +        case STRING:
 +          result = this.getString().equals(objToCompare.getString());
 +          break;
 +        case SHORT:
 +          result = this.getShort().equals(objToCompare.getShort());
 +          break;
 +        case INT:
 +          result = this.getInt().equals(objToCompare.getInt());
 +          break;
++        case LONG:
++        case TIMESTAMP:
++          result = this.getLong().equals(objToCompare.getLong());
++          break;
 +        case DOUBLE:
 +          result = this.getDouble().equals(objToCompare.getDouble());
 +          break;
-         case TIMESTAMP:
-           result = this.getLong().equals(objToCompare.getLong());
++        case DECIMAL:
++          result = this.getDecimal().equals(objToCompare.getDecimal());
 +          break;
 +        default:
 +          break;
 +      }
 +    } catch (FilterIllegalMemberException ex) {
 +      return false;
 +    }
 +
 +    return result;
 +  }
 +
 +  public boolean isNull() {
 +    return value == null;
 +  }
 +
 +  @Override public int compareTo(ExpressionResult o) {
 +    try {
 +      switch (o.dataType) {
 +        case SHORT:
 +        case INT:
 +        case LONG:
 +        case DOUBLE:
 +          Double d1 = this.getDouble();
 +          Double d2 = o.getDouble();
 +          return d1.compareTo(d2);
 +        case DECIMAL:
 +          java.math.BigDecimal val1 = this.getDecimal();
 +          java.math.BigDecimal val2 = o.getDecimal();
 +          return val1.compareTo(val2);
 +        case TIMESTAMP:
 +          SimpleDateFormat parser = new SimpleDateFormat(CarbonProperties.getInstance()
 +              .getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
 +                  CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT));
 +          Date date1 = null;
 +          Date date2 = null;
 +          date1 = parser.parse(this.getString());
 +          date2 = parser.parse(o.getString());
 +          return date1.compareTo(date2);
 +        case STRING:
 +        default:
 +          return this.getString().compareTo(o.getString());
 +      }
 +    } catch (Exception e) {
 +      return -1;
 +    }
 +  }
 +
 +}
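One easy-to-miss change in this hunk is getDecimal() for DOUBLE values: the merge switches from BigDecimal's double constructor to its String constructor. The short, plain-JDK snippet below shows why that matters when the resulting decimals are later compared in filters:

import java.math.BigDecimal;

public class DecimalConversionDemo {

  public static void main(String[] args) {
    double d = 0.1;

    // double constructor: exposes the exact binary representation of 0.1
    BigDecimal fromDouble = new BigDecimal(d);
    // String constructor (what the merged code now uses): keeps 0.1 as written
    BigDecimal fromString = new BigDecimal(Double.toString(d));

    System.out.println(fromDouble); // 0.1000000000000000055511151231257827021181583404541015625
    System.out.println(fromString); // 0.1

    // comparisons against a literal decimal only behave as expected via the String path
    System.out.println(fromDouble.compareTo(new BigDecimal("0.1")) == 0); // false
    System.out.println(fromString.compareTo(new BigDecimal("0.1")) == 0); // true
  }
}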

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/eaecb651/core/src/main/java/org/carbondata/scan/expression/conditional/EqualToExpression.java
----------------------------------------------------------------------
diff --cc core/src/main/java/org/carbondata/scan/expression/conditional/EqualToExpression.java
index 18d492d,0000000..30b1916
mode 100644,000000..100644
--- a/core/src/main/java/org/carbondata/scan/expression/conditional/EqualToExpression.java
+++ b/core/src/main/java/org/carbondata/scan/expression/conditional/EqualToExpression.java
@@@ -1,99 -1,0 +1,108 @@@
 +/*
 + * Licensed to the Apache Software Foundation (ASF) under one
 + * or more contributor license agreements.  See the NOTICE file
 + * distributed with this work for additional information
 + * regarding copyright ownership.  The ASF licenses this file
 + * to you under the Apache License, Version 2.0 (the
 + * "License"); you may not use this file except in compliance
 + * with the License.  You may obtain a copy of the License at
 + *
 + *    http://www.apache.org/licenses/LICENSE-2.0
 + *
 + * Unless required by applicable law or agreed to in writing,
 + * software distributed under the License is distributed on an
 + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 + * KIND, either express or implied.  See the License for the
 + * specific language governing permissions and limitations
 + * under the License.
 + */
 +
 +package org.carbondata.scan.expression.conditional;
 +
 +import org.carbondata.core.carbon.metadata.datatype.DataType;
 +import org.carbondata.scan.expression.Expression;
 +import org.carbondata.scan.expression.ExpressionResult;
 +import org.carbondata.scan.expression.exception.FilterIllegalMemberException;
 +import org.carbondata.scan.expression.exception.FilterUnsupportedException;
 +import org.carbondata.scan.filter.intf.ExpressionType;
 +import org.carbondata.scan.filter.intf.RowIntf;
 +
 +public class EqualToExpression extends BinaryConditionalExpression {
 +
 +  private static final long serialVersionUID = 1L;
++  private boolean isNull;
 +
 +  public EqualToExpression(Expression left, Expression right) {
 +    super(left, right);
 +  }
 +
++  public EqualToExpression(Expression left, Expression right, boolean isNull) {
++    super(left, right);
++    this.isNull = isNull;
++  }
++
 +  @Override public ExpressionResult evaluate(RowIntf value)
 +      throws FilterUnsupportedException, FilterIllegalMemberException {
 +    ExpressionResult elRes = left.evaluate(value);
 +    ExpressionResult erRes = right.evaluate(value);
 +
 +    boolean result = false;
 +
 +    ExpressionResult val1 = elRes;
 +    ExpressionResult val2 = erRes;
 +
 +    if (elRes.isNull() || erRes.isNull()) {
-       result = elRes.isNull() && erRes.isNull();
-       val1.set(DataType.BOOLEAN, result);
-       return val1;
++      if (isNull) {
++        elRes.set(DataType.BOOLEAN, elRes.isNull() == erRes.isNull());
++      } else {
++        elRes.set(DataType.BOOLEAN, false);
++      }
++      return elRes;
 +    }
 +    // default implementation if the data types of the two result sets differ
 +    if (elRes.getDataType() != erRes.getDataType()) {
 +      if (elRes.getDataType().getPresedenceOrder() < erRes.getDataType().getPresedenceOrder()) {
 +        val2 = elRes;
 +        val1 = erRes;
 +      }
 +    }
 +
 +    switch (val1.getDataType()) {
 +      case STRING:
 +        result = val1.getString().equals(val2.getString());
 +        break;
 +      case SHORT:
 +        result = val1.getShort().equals(val2.getShort());
 +        break;
 +      case INT:
 +        result = val1.getInt().equals(val2.getInt());
 +        break;
 +      case DOUBLE:
 +        result = val1.getDouble().equals(val2.getDouble());
 +        break;
 +      case TIMESTAMP:
 +        result = val1.getTime().equals(val2.getTime());
 +        break;
 +      case LONG:
 +        result = val1.getLong().equals(val2.getLong());
 +        break;
 +      case DECIMAL:
 +        result = val1.getDecimal().compareTo(val2.getDecimal()) == 0;
 +        break;
 +      default:
 +        throw new FilterUnsupportedException(
 +            "DataType: " + val1.getDataType() + " not supported for the filter expression");
 +    }
 +    val1.set(DataType.BOOLEAN, result);
 +    return val1;
 +  }
 +
 +  @Override public ExpressionType getFilterExpressionType() {
 +    return ExpressionType.EQUALS;
 +  }
 +
 +  @Override public String getString() {
 +    return "EqualTo(" + left.getString() + ',' + right.getString() + ')';
 +  }
 +
 +}
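The new isNull flag on EqualToExpression changes how nulls are compared: with the flag set the expression acts as a null-safe equality (both sides null evaluates to true), while the default path now returns false whenever either side is null. A minimal standalone sketch of just that branch, with plain Objects standing in for ExpressionResult:

public class NullSafeEqualsSketch {

  // mirrors the null branch added to evaluate(); isNullSafe corresponds to the
  // new isNull field passed through the added constructor
  static boolean equalsWithNulls(Object left, Object right, boolean isNullSafe) {
    boolean leftNull = (left == null);
    boolean rightNull = (right == null);
    if (leftNull || rightNull) {
      return isNullSafe && (leftNull == rightNull);
    }
    return left.equals(right);
  }

  public static void main(String[] args) {
    System.out.println(equalsWithNulls(null, null, false)); // false (plain equality)
    System.out.println(equalsWithNulls(null, null, true));  // true  (null-safe equality)
    System.out.println(equalsWithNulls("a", null, true));   // false
    System.out.println(equalsWithNulls("a", "a", false));   // true
  }
}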


