carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From gvram...@apache.org
Subject [3/8] carbondata git commit: [CARBONDATA-2720] Remove dead code
Date Thu, 12 Jul 2018 06:55:10 GMT
http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMap.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMap.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMap.java
deleted file mode 100644
index 77b5347..0000000
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMap.java
+++ /dev/null
@@ -1,238 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.datamap.lucene;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-
-import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.datamap.dev.DataMapModel;
-import org.apache.carbondata.core.datamap.dev.cgdatamap.CoarseGrainDataMap;
-import org.apache.carbondata.core.datastore.block.SegmentProperties;
-import org.apache.carbondata.core.datastore.impl.FileFactory;
-import org.apache.carbondata.core.indexstore.Blocklet;
-import org.apache.carbondata.core.indexstore.PartitionSpec;
-import org.apache.carbondata.core.memory.MemoryException;
-import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
-
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.lucene.analysis.Analyzer;
-import org.apache.lucene.analysis.standard.StandardAnalyzer;
-import org.apache.lucene.document.Document;
-import org.apache.lucene.index.DirectoryReader;
-import org.apache.lucene.index.IndexReader;
-import org.apache.lucene.index.IndexableField;
-import org.apache.lucene.queryparser.classic.MultiFieldQueryParser;
-import org.apache.lucene.queryparser.classic.ParseException;
-import org.apache.lucene.queryparser.classic.QueryParser;
-import org.apache.lucene.search.IndexSearcher;
-import org.apache.lucene.search.Query;
-import org.apache.lucene.search.ScoreDoc;
-import org.apache.lucene.search.TopDocs;
-import org.apache.lucene.store.Directory;
-import org.apache.solr.store.hdfs.HdfsDirectory;
-
-@InterfaceAudience.Internal
-public class LuceneCoarseGrainDataMap extends CoarseGrainDataMap {
-
-  /**
-   * log information
-   */
-  private static final LogService LOGGER =
-      LogServiceFactory.getLogService(LuceneCoarseGrainDataMap.class.getName());
-
-  public static final int BLOCKID_ID = 0;
-
-  public static final int BLOCKLETID_ID = 1;
-
-  public static final int PAGEID_ID = 2;
-
-  public static final int ROWID_ID = 3;
-  /**
-   * searcher object for this datamap
-   */
-  private IndexSearcher indexSearcher = null;
-
-  /**
-   * default max values to return
-   */
-  private static int MAX_RESULT_NUMBER = 100;
-
-  /**
-   * analyzer for lucene index
-   */
-  private Analyzer analyzer;
-
-  LuceneCoarseGrainDataMap(Analyzer analyzer) {
-    this.analyzer = analyzer;
-  }
-
-  /**
-   * It is called to load the data map to memory or to initialize it.
-   */
-  @Override
-  public void init(DataMapModel dataMapModel) throws MemoryException, IOException {
-    // get this path from file path
-    Path indexPath = FileFactory.getPath(dataMapModel.getFilePath());
-
-    LOGGER.info("Lucene index read path " + indexPath.toString());
-
-    // get file system , use hdfs file system , realized in solr project
-    FileSystem fs = FileFactory.getFileSystem(indexPath);
-
-    // check this path valid
-    if (!fs.exists(indexPath)) {
-      String errorMessage = String.format("index directory %s not exists.", indexPath);
-      LOGGER.error(errorMessage);
-      throw new IOException(errorMessage);
-    }
-
-    if (!fs.isDirectory(indexPath)) {
-      String errorMessage = String.format("error index path %s, must be directory", indexPath);
-      LOGGER.error(errorMessage);
-      throw new IOException(errorMessage);
-    }
-
-    // open this index path , use HDFS default configuration
-    Directory indexDir = new HdfsDirectory(indexPath, FileFactory.getConfiguration());
-
-    IndexReader indexReader = DirectoryReader.open(indexDir);
-    if (indexReader == null) {
-      throw new RuntimeException("failed to create index reader object");
-    }
-
-    // create a index searcher object
-    indexSearcher = new IndexSearcher(indexReader);
-  }
-
-  /**
-   * Prune the datamap with filter expression. It returns the list of
-   * blocklets where these filters can exist.
-   */
-  @Override
-  public List<Blocklet> prune(FilterResolverIntf filterExp, SegmentProperties segmentProperties,
-      List<PartitionSpec> partitions) throws IOException {
-
-    // convert filter expr into lucene list query
-    List<String> fields = new ArrayList<String>();
-
-    // only for test , query all data
-    String strQuery = "*:*";
-
-    String[] sFields = new String[fields.size()];
-    fields.toArray(sFields);
-
-    // get analyzer
-    if (analyzer == null) {
-      analyzer = new StandardAnalyzer();
-    }
-
-    // use MultiFieldQueryParser to parser query
-    QueryParser queryParser = new MultiFieldQueryParser(sFields, analyzer);
-    Query query;
-    try {
-      query = queryParser.parse(strQuery);
-    } catch (ParseException e) {
-      String errorMessage = String
-          .format("failed to filter block with query %s, detail is %s", strQuery, e.getMessage());
-      LOGGER.error(errorMessage);
-      return null;
-    }
-
-    // execute index search
-    TopDocs result;
-    try {
-      result = indexSearcher.search(query, MAX_RESULT_NUMBER);
-    } catch (IOException e) {
-      String errorMessage =
-          String.format("failed to search lucene data, detail is %s", e.getMessage());
-      LOGGER.error(errorMessage);
-      throw new IOException(errorMessage);
-    }
-
-    // temporary data, delete duplicated data
-    // Map<BlockId, Map<BlockletId, Map<PageId, Set<RowId>>>>
-    Map<String, Set<Number>> mapBlocks = new HashMap<String, Set<Number>>();
-
-    for (ScoreDoc scoreDoc : result.scoreDocs) {
-      // get a document
-      Document doc = indexSearcher.doc(scoreDoc.doc);
-
-      // get all fields
-      List<IndexableField> fieldsInDoc = doc.getFields();
-
-      // get this block id Map<BlockId, Set<BlockletId>>>>
-      String blockId = fieldsInDoc.get(BLOCKID_ID).stringValue();
-      Set<Number> setBlocklets = mapBlocks.get(blockId);
-      if (setBlocklets == null) {
-        setBlocklets = new HashSet<Number>();
-        mapBlocks.put(blockId, setBlocklets);
-      }
-
-      // get the blocklet id Set<BlockletId>
-      Number blockletId = fieldsInDoc.get(BLOCKLETID_ID).numericValue();
-      if (!setBlocklets.contains(blockletId.intValue())) {
-        setBlocklets.add(blockletId.intValue());
-      }
-    }
-
-    // result blocklets
-    List<Blocklet> blocklets = new ArrayList<Blocklet>();
-
-    // transform all blocks into result type blocklets Map<BlockId, Set<BlockletId>>
-    for (Map.Entry<String, Set<Number>> mapBlock : mapBlocks.entrySet()) {
-      String blockId = mapBlock.getKey();
-      Set<Number> setBlocklets = mapBlock.getValue();
-
-      // for blocklets in this block Set<BlockletId>
-      for (Number blockletId : setBlocklets) {
-
-        // add a CoarseGrainBlocklet
-        blocklets.add(new Blocklet(blockId, blockletId.toString()));
-      }
-    }
-
-    return blocklets;
-  }
-
-  @Override
-  public boolean isScanRequired(FilterResolverIntf filterExp) {
-    return true;
-  }
-
-  /**
-   * Clear complete index table and release memory.
-   */
-  @Override
-  public void clear() {
-
-  }
-
-  @Override
-  public void finish() {
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMapFactory.java
----------------------------------------------------------------------
diff --git a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMapFactory.java b/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMapFactory.java
deleted file mode 100644
index b9c2ffa..0000000
--- a/datamap/lucene/src/main/java/org/apache/carbondata/datamap/lucene/LuceneCoarseGrainDataMapFactory.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.datamap.lucene;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.carbondata.common.annotations.InterfaceAudience;
-import org.apache.carbondata.common.exceptions.sql.MalformedDataMapCommandException;
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.datamap.DataMapDistributable;
-import org.apache.carbondata.core.datamap.DataMapLevel;
-import org.apache.carbondata.core.datamap.Segment;
-import org.apache.carbondata.core.datamap.dev.DataMapModel;
-import org.apache.carbondata.core.datamap.dev.DataMapWriter;
-import org.apache.carbondata.core.datamap.dev.cgdatamap.CoarseGrainDataMap;
-import org.apache.carbondata.core.features.TableOperation;
-import org.apache.carbondata.core.memory.MemoryException;
-import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
-import org.apache.carbondata.core.metadata.schema.table.DataMapSchema;
-
-/**
- * CG level of lucene DataMap
- */
-@InterfaceAudience.Internal
-public class LuceneCoarseGrainDataMapFactory extends LuceneDataMapFactoryBase<CoarseGrainDataMap> {
-  private static final LogService LOGGER =
-      LogServiceFactory.getLogService(LuceneCoarseGrainDataMapFactory.class.getName());
-
-  public LuceneCoarseGrainDataMapFactory(CarbonTable carbonTable, DataMapSchema dataMapSchema)
-      throws MalformedDataMapCommandException {
-    super(carbonTable, dataMapSchema);
-  }
-
-  /**
-   * Get the datamap for segmentid
-   */
-  @Override
-  public List<CoarseGrainDataMap> getDataMaps(Segment segment) throws IOException {
-    List<CoarseGrainDataMap> lstDataMap = new ArrayList<>();
-    CoarseGrainDataMap dataMap = new LuceneCoarseGrainDataMap(analyzer);
-    try {
-      dataMap.init(new DataMapModel(
-          DataMapWriter.getDefaultDataMapPath(
-              tableIdentifier.getTablePath(), segment.getSegmentNo(), dataMapName)));
-    } catch (MemoryException e) {
-      LOGGER.error("failed to get lucene datamap , detail is {}" + e.getMessage());
-      return lstDataMap;
-    }
-    lstDataMap.add(dataMap);
-    return lstDataMap;
-  }
-
-  /**
-   * Get datamaps for distributable object.
-   */
-  @Override
-  public List<CoarseGrainDataMap> getDataMaps(DataMapDistributable distributable)
-      throws IOException {
-    return getDataMaps(distributable.getSegment());
-  }
-
-  @Override
-  public DataMapLevel getDataMapLevel() {
-    return DataMapLevel.CG;
-  }
-
-  @Override
-  public void deleteDatamapData() {
-
-  }
-
-  @Override
-  public boolean willBecomeStale(TableOperation operation) {
-    return false;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/datamap/mv/core/pom.xml
----------------------------------------------------------------------
diff --git a/datamap/mv/core/pom.xml b/datamap/mv/core/pom.xml
index 54960a1..77b5cc7 100644
--- a/datamap/mv/core/pom.xml
+++ b/datamap/mv/core/pom.xml
@@ -31,6 +31,7 @@
 
   <properties>
     <dev.path>${basedir}/../../../dev</dev.path>
+    <jacoco.append>true</jacoco.append>
   </properties>
 
   <dependencies>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/MatchConditions.scala
----------------------------------------------------------------------
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/MatchConditions.scala b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/MatchConditions.scala
deleted file mode 100644
index 2a4da27..0000000
--- a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/MatchConditions.scala
+++ /dev/null
@@ -1,28 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.mv.rewrite
-
-// TODO: implement this to modularize DefaultMatchingFunctions
-object MatchConditions {
-}
-
-class MatchConditions(flags: Long) {
-  def hasFlag(flag: Long): Boolean = {
-    throw new UnsupportedOperationException
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/hadoop/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop/pom.xml b/hadoop/pom.xml
index 1d7fab3..592d831 100644
--- a/hadoop/pom.xml
+++ b/hadoop/pom.xml
@@ -31,6 +31,7 @@
 
   <properties>
     <dev.path>${basedir}/../dev</dev.path>
+    <jacoco.append>true</jacoco.append>
   </properties>
 
   <dependencies>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/hadoop/src/main/java/org/apache/carbondata/hadoop/CacheClient.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/CacheClient.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/CacheClient.java
deleted file mode 100644
index 8be1e2e..0000000
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/CacheClient.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.hadoop;
-
-import org.apache.carbondata.core.cache.Cache;
-import org.apache.carbondata.core.cache.CacheProvider;
-import org.apache.carbondata.core.cache.CacheType;
-import org.apache.carbondata.core.datastore.TableSegmentUniqueIdentifier;
-import org.apache.carbondata.core.datastore.block.SegmentTaskIndexWrapper;
-
-/**
- * CacheClient : Holds all the Cache access clients for Btree, Dictionary
- */
-public class CacheClient {
-
-  // segment access client for driver LRU cache
-  private CacheAccessClient<TableSegmentUniqueIdentifier, SegmentTaskIndexWrapper>
-      segmentAccessClient;
-
-  public CacheClient() {
-    Cache<TableSegmentUniqueIdentifier, SegmentTaskIndexWrapper> segmentCache =
-        CacheProvider.getInstance().createCache(CacheType.DRIVER_BTREE);
-    segmentAccessClient = new CacheAccessClient<>(segmentCache);
-  }
-
-  public CacheAccessClient<TableSegmentUniqueIdentifier, SegmentTaskIndexWrapper>
-      getSegmentAccessClient() {
-    return segmentAccessClient;
-  }
-
-  public void close() {
-    segmentAccessClient.close();
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/CarbonFormatType.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/CarbonFormatType.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/CarbonFormatType.java
deleted file mode 100644
index fe63e88..0000000
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/CarbonFormatType.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.hadoop.internal;
-
-public enum CarbonFormatType {
-  COLUMNAR
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/CarbonInputSplit.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/CarbonInputSplit.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/CarbonInputSplit.java
deleted file mode 100644
index 23b39af..0000000
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/CarbonInputSplit.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.hadoop.internal;
-
-import org.apache.hadoop.mapreduce.InputSplit;
-
-/**
- * Carbon input split can be different format, application should create the record reader
- * based on format type.
- */
-public abstract class CarbonInputSplit extends InputSplit {
-
-  /**
-   * @return the format type of this split.
-   */
-  public abstract CarbonFormatType formatType();
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/segment/Segment.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/segment/Segment.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/segment/Segment.java
deleted file mode 100644
index b619158..0000000
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/internal/segment/Segment.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.carbondata.hadoop.internal.segment;
-
-import java.io.IOException;
-import java.util.List;
-
-import org.apache.carbondata.core.scan.filter.resolver.FilterResolverIntf;
-
-import org.apache.hadoop.mapreduce.InputSplit;
-import org.apache.hadoop.mapreduce.JobContext;
-
-/**
- * Within a carbon table, each data load becomes one Segment,
- * which stores all data files belong to this load in the segment folder.
- */
-public abstract class Segment {
-
-  protected String id;
-
-  /**
-   * Path of the segment folder
-   */
-  private String path;
-
-  public Segment(String id, String path) {
-    this.id = id;
-    this.path = path;
-  }
-
-  public String getId() {
-    return id;
-  }
-
-  public String getPath() {
-    return path;
-  }
-
-  /**
-   * get all files, implementation may use the input filter and index to prune files
-   * @param job job context
-   * @param filterResolver filter
-   * @return all files
-   */
-  public abstract List<InputSplit> getSplits(JobContext job, FilterResolverIntf filterResolver)
-      throws IOException;
-
-  /**
-   * This is called after Segment is loaded and before segment is committed,
-   * implementation should load indices if required
-   * @param job job context
-   */
-  public abstract void setupForRead(JobContext job) throws IOException;
-
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
----------------------------------------------------------------------
diff --git a/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java b/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
index 63acad3..287f623 100644
--- a/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
+++ b/hadoop/src/main/java/org/apache/carbondata/hadoop/testutil/StoreCreator.java
@@ -195,84 +195,70 @@ public class StoreCreator {
     encodings.add(Encoding.DICTIONARY);
     ColumnSchema id = new ColumnSchema();
     id.setColumnName("ID");
-    id.setColumnar(true);
     id.setDataType(DataTypes.INT);
     id.setEncodingList(encodings);
     id.setColumnUniqueId(UUID.randomUUID().toString());
     id.setColumnReferenceId(id.getColumnUniqueId());
     id.setDimensionColumn(true);
-    id.setColumnGroup(1);
     columnSchemas.add(id);
 
     ColumnSchema date = new ColumnSchema();
     date.setColumnName("date");
-    date.setColumnar(true);
     date.setDataType(DataTypes.STRING);
     date.setEncodingList(encodings);
     date.setColumnUniqueId(UUID.randomUUID().toString());
     date.setDimensionColumn(true);
-    date.setColumnGroup(2);
     date.setSortColumn(true);
     date.setColumnReferenceId(id.getColumnUniqueId());
     columnSchemas.add(date);
 
     ColumnSchema country = new ColumnSchema();
     country.setColumnName("country");
-    country.setColumnar(true);
     country.setDataType(DataTypes.STRING);
     country.setEncodingList(encodings);
     country.setColumnUniqueId(UUID.randomUUID().toString());
     country.setDimensionColumn(true);
-    country.setColumnGroup(3);
     country.setSortColumn(true);
     country.setColumnReferenceId(id.getColumnUniqueId());
     columnSchemas.add(country);
 
     ColumnSchema name = new ColumnSchema();
     name.setColumnName("name");
-    name.setColumnar(true);
     name.setDataType(DataTypes.STRING);
     name.setEncodingList(encodings);
     name.setColumnUniqueId(UUID.randomUUID().toString());
     name.setDimensionColumn(true);
-    name.setColumnGroup(4);
     name.setSortColumn(true);
     name.setColumnReferenceId(id.getColumnUniqueId());
     columnSchemas.add(name);
 
     ColumnSchema phonetype = new ColumnSchema();
     phonetype.setColumnName("phonetype");
-    phonetype.setColumnar(true);
     phonetype.setDataType(DataTypes.STRING);
     phonetype.setEncodingList(encodings);
     phonetype.setColumnUniqueId(UUID.randomUUID().toString());
     phonetype.setDimensionColumn(true);
-    phonetype.setColumnGroup(5);
     phonetype.setSortColumn(true);
     phonetype.setColumnReferenceId(id.getColumnUniqueId());
     columnSchemas.add(phonetype);
 
     ColumnSchema serialname = new ColumnSchema();
     serialname.setColumnName("serialname");
-    serialname.setColumnar(true);
     serialname.setDataType(DataTypes.STRING);
     serialname.setEncodingList(encodings);
     serialname.setColumnUniqueId(UUID.randomUUID().toString());
     serialname.setDimensionColumn(true);
-    serialname.setColumnGroup(6);
     serialname.setSortColumn(true);
     serialname.setColumnReferenceId(id.getColumnUniqueId());
     columnSchemas.add(serialname);
 
     ColumnSchema salary = new ColumnSchema();
     salary.setColumnName("salary");
-    salary.setColumnar(true);
     salary.setDataType(DataTypes.INT);
     salary.setEncodingList(new ArrayList<Encoding>());
     salary.setColumnUniqueId(UUID.randomUUID().toString());
     salary.setDimensionColumn(false);
     salary.setColumnReferenceId(id.getColumnUniqueId());
-    salary.setColumnGroup(7);
     columnSchemas.add(salary);
 
     tableSchema.setListOfColumns(columnSchemas);
@@ -391,7 +377,6 @@ public class StoreCreator {
     CarbonProperties.getInstance().addProperty("is.int.based.indexer", "true");
     CarbonProperties.getInstance().addProperty("aggregate.columnar.keyblock", "true");
     CarbonProperties.getInstance().addProperty("is.compressed.keyblock", "false");
-    CarbonProperties.getInstance().addProperty("carbon.leaf.node.size", "120000");
 
     String graphPath =
         outPutLoc + File.separator + loadModel.getDatabaseName() + File.separator + tableName

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/integration/hive/pom.xml
----------------------------------------------------------------------
diff --git a/integration/hive/pom.xml b/integration/hive/pom.xml
index c144353..081a5c0 100644
--- a/integration/hive/pom.xml
+++ b/integration/hive/pom.xml
@@ -32,6 +32,7 @@
     <properties>
         <hive.version>1.2.1</hive.version>
         <dev.path>${basedir}/../../dev</dev.path>
+        <jacoco.append>true</jacoco.append>
     </properties>
 
     <dependencies>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/integration/presto/pom.xml
----------------------------------------------------------------------
diff --git a/integration/presto/pom.xml b/integration/presto/pom.xml
index c61023a..5ede3e9 100644
--- a/integration/presto/pom.xml
+++ b/integration/presto/pom.xml
@@ -33,6 +33,7 @@
   <properties>
     <presto.version>0.187</presto.version>
     <dev.path>${basedir}/../../dev</dev.path>
+    <jacoco.append>true</jacoco.append>
   </properties>
 
   <dependencies>

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataColumnHandle.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataColumnHandle.java b/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataColumnHandle.java
index 4ec145d..9be5560 100755
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataColumnHandle.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataColumnHandle.java
@@ -39,9 +39,7 @@ public class CarbondataColumnHandle implements ColumnHandle {
   private final Type columnType;
   private final int ordinalPosition;
   private final int keyOrdinal;
-  private final int columnGroupOrdinal;
 
-  private final int columnGroupId;
   private final String columnUniqueId;
   private final boolean isInvertedIndex;
 
@@ -63,27 +61,13 @@ public class CarbondataColumnHandle implements ColumnHandle {
     return keyOrdinal;
   }
 
-  public int getColumnGroupOrdinal() {
-    return columnGroupOrdinal;
-  }
-
-  public int getColumnGroupId() {
-    return columnGroupId;
-  }
-
-  public String getColumnUniqueId() {
-    return columnUniqueId;
-  }
-
   @JsonCreator public CarbondataColumnHandle(
       @JsonProperty("connectorId") String connectorId,
       @JsonProperty("columnName") String columnName,
       @JsonProperty("columnType") Type columnType,
       @JsonProperty("ordinalPosition") int ordinalPosition,
       @JsonProperty("keyOrdinal") int keyOrdinal,
-      @JsonProperty("columnGroupOrdinal") int columnGroupOrdinal,
       @JsonProperty("isMeasure") boolean isMeasure,
-      @JsonProperty("columnGroupId") int columnGroupId,
       @JsonProperty("columnUniqueId") String columnUniqueId,
       @JsonProperty("isInvertedIndex") boolean isInvertedIndex,
       @JsonProperty("precision") int precision,
@@ -94,10 +78,8 @@ public class CarbondataColumnHandle implements ColumnHandle {
 
     this.ordinalPosition = requireNonNull(ordinalPosition, "ordinalPosition is null");
     this.keyOrdinal = requireNonNull(keyOrdinal, "keyOrdinal is null");
-    this.columnGroupOrdinal = requireNonNull(columnGroupOrdinal, "columnGroupOrdinal is null");
 
     this.isMeasure = isMeasure;
-    this.columnGroupId = requireNonNull(columnGroupId, "columnGroupId is null");
     this.columnUniqueId = columnUniqueId;//requireNonNull(columnUniqueId, "columnUniqueId is null");
     this.isInvertedIndex = requireNonNull(isInvertedIndex, "isInvertedIndex is null");
     this.precision = precision;

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataMetadata.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataMetadata.java b/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataMetadata.java
index 256e405..097d667 100755
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataMetadata.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataMetadata.java
@@ -176,20 +176,18 @@ public class CarbondataMetadata implements ConnectorMetadata {
 
       Type spiType = carbonDataType2SpiMapper(cs);
       columnHandles.put(cs.getColumnName(),
-          new CarbondataColumnHandle(connectorId, cs.getColumnName(), spiType,
-              column.getSchemaOrdinal(), column.getKeyOrdinal(), column.getColumnGroupOrdinal(),
-              false, cs.getColumnGroupId(), cs.getColumnUniqueId(), cs.isUseInvertedIndex(),
-              cs.getPrecision(), cs.getScale()));
+          new CarbondataColumnHandle(connectorId, cs.getColumnName(), spiType, column.getSchemaOrdinal(),
+              column.getKeyOrdinal(), false,
+              cs.getColumnUniqueId(), cs.isUseInvertedIndex(), cs.getPrecision(), cs.getScale()));
     }
 
     for (CarbonMeasure measure : cb.getMeasureByTableName(tableName)) {
       ColumnSchema cs = measure.getColumnSchema();
       Type spiType = carbonDataType2SpiMapper(cs);
       columnHandles.put(cs.getColumnName(),
-          new CarbondataColumnHandle(connectorId, cs.getColumnName(), spiType,
-              cs.getSchemaOrdinal(), measure.getOrdinal(), cs.getColumnGroupId(), true,
-              cs.getColumnGroupId(), cs.getColumnUniqueId(), cs.isUseInvertedIndex(),
-              cs.getPrecision(), cs.getScale()));
+          new CarbondataColumnHandle(connectorId, cs.getColumnName(), spiType, cs.getSchemaOrdinal(),
+              measure.getOrdinal(), true,
+              cs.getColumnUniqueId(), cs.isUseInvertedIndex(), cs.getPrecision(), cs.getScale()));
     }
 
     columnHandleMap = columnHandles.build();

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
----------------------------------------------------------------------
diff --git a/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala b/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
index f4415b8..ffa5b00 100644
--- a/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
+++ b/integration/presto/src/test/scala/org/apache/carbondata/presto/util/CarbonDataStoreCreator.scala
@@ -155,13 +155,11 @@ object CarbonDataStoreCreator {
 
     val id: ColumnSchema = new ColumnSchema()
     id.setColumnName("ID")
-    id.setColumnar(true)
     id.setDataType(DataTypes.INT)
     id.setEncodingList(dictionaryEncoding)
     id.setColumnUniqueId(UUID.randomUUID().toString)
     id.setColumnReferenceId(id.getColumnUniqueId)
     id.setDimensionColumn(true)
-    id.setColumnGroup(1)
     id.setSchemaOrdinal(0)
     columnSchemas.add(id)
 
@@ -172,94 +170,79 @@ object CarbonDataStoreCreator {
 
     val date: ColumnSchema = new ColumnSchema()
     date.setColumnName("date")
-    date.setColumnar(true)
     date.setDataType(DataTypes.DATE)
     date.setEncodingList(directDictionaryEncoding)
     date.setColumnUniqueId(UUID.randomUUID().toString)
     date.setDimensionColumn(true)
-    date.setColumnGroup(2)
     date.setColumnReferenceId(date.getColumnUniqueId)
     date.setSchemaOrdinal(1)
     columnSchemas.add(date)
 
     val country: ColumnSchema = new ColumnSchema()
     country.setColumnName("country")
-    country.setColumnar(true)
     country.setDataType(DataTypes.STRING)
     country.setEncodingList(dictionaryEncoding)
     country.setColumnUniqueId(UUID.randomUUID().toString)
     country.setColumnReferenceId(country.getColumnUniqueId)
     country.setDimensionColumn(true)
-    country.setColumnGroup(3)
     country.setSchemaOrdinal(2)
     country.setColumnReferenceId(country.getColumnUniqueId)
     columnSchemas.add(country)
 
     val name: ColumnSchema = new ColumnSchema()
     name.setColumnName("name")
-    name.setColumnar(true)
     name.setDataType(DataTypes.STRING)
     name.setEncodingList(dictionaryEncoding)
     name.setColumnUniqueId(UUID.randomUUID().toString)
     name.setDimensionColumn(true)
-    name.setColumnGroup(4)
     name.setSchemaOrdinal(3)
     name.setColumnReferenceId(name.getColumnUniqueId)
     columnSchemas.add(name)
 
     val phonetype: ColumnSchema = new ColumnSchema()
     phonetype.setColumnName("phonetype")
-    phonetype.setColumnar(true)
     phonetype.setDataType(DataTypes.STRING)
     phonetype.setEncodingList(dictionaryEncoding)
     phonetype.setColumnUniqueId(UUID.randomUUID().toString)
     phonetype.setDimensionColumn(true)
-    phonetype.setColumnGroup(5)
     phonetype.setSchemaOrdinal(4)
     phonetype.setColumnReferenceId(phonetype.getColumnUniqueId)
     columnSchemas.add(phonetype)
 
     val serialname: ColumnSchema = new ColumnSchema()
     serialname.setColumnName("serialname")
-    serialname.setColumnar(true)
     serialname.setDataType(DataTypes.STRING)
     serialname.setEncodingList(dictionaryEncoding)
     serialname.setColumnUniqueId(UUID.randomUUID().toString)
     serialname.setDimensionColumn(true)
-    serialname.setColumnGroup(6)
     serialname.setSchemaOrdinal(5)
     serialname.setColumnReferenceId(serialname.getColumnUniqueId)
     columnSchemas.add(serialname)
 
     val salary: ColumnSchema = new ColumnSchema()
     salary.setColumnName("salary")
-    salary.setColumnar(true)
     salary.setDataType(DataTypes.DOUBLE)
     salary.setEncodingList(dictionaryEncoding)
     salary.setColumnUniqueId(UUID.randomUUID().toString)
     salary.setDimensionColumn(true)
-    salary.setColumnGroup(7)
     salary.setSchemaOrdinal(6)
     salary.setColumnReferenceId(salary.getColumnUniqueId)
     columnSchemas.add(salary)
 
     val bonus: ColumnSchema = new ColumnSchema()
     bonus.setColumnName("bonus")
-    bonus.setColumnar(true)
     bonus.setDataType(DataTypes.createDecimalType(10, 4))
     bonus.setPrecision(10)
     bonus.setScale(4)
     bonus.setEncodingList(invertedIndexEncoding)
     bonus.setColumnUniqueId(UUID.randomUUID().toString)
     bonus.setDimensionColumn(false)
-    bonus.setColumnGroup(8)
     bonus.setSchemaOrdinal(7)
     bonus.setColumnReferenceId(bonus.getColumnUniqueId)
     columnSchemas.add(bonus)
 
     val monthlyBonus: ColumnSchema = new ColumnSchema()
     monthlyBonus.setColumnName("monthlyBonus")
-    monthlyBonus.setColumnar(true)
     monthlyBonus.setDataType(DataTypes.createDecimalType(18, 4))
     monthlyBonus.setPrecision(18)
     monthlyBonus.setScale(4)
@@ -267,42 +250,35 @@ object CarbonDataStoreCreator {
     monthlyBonus.setEncodingList(invertedIndexEncoding)
     monthlyBonus.setColumnUniqueId(UUID.randomUUID().toString)
     monthlyBonus.setDimensionColumn(false)
-    monthlyBonus.setColumnGroup(9)
     monthlyBonus.setColumnReferenceId(monthlyBonus.getColumnUniqueId)
     columnSchemas.add(monthlyBonus)
 
     val dob: ColumnSchema = new ColumnSchema()
     dob.setColumnName("dob")
-    dob.setColumnar(true)
     dob.setDataType(DataTypes.TIMESTAMP)
     dob.setEncodingList(directDictionaryEncoding)
     dob.setColumnUniqueId(UUID.randomUUID().toString)
     dob.setDimensionColumn(true)
-    dob.setColumnGroup(9)
     dob.setSchemaOrdinal(9)
     dob.setColumnReferenceId(dob.getColumnUniqueId)
     columnSchemas.add(dob)
 
     val shortField: ColumnSchema = new ColumnSchema()
     shortField.setColumnName("shortField")
-    shortField.setColumnar(true)
     shortField.setDataType(DataTypes.SHORT)
     shortField.setEncodingList(dictionaryEncoding)
     shortField.setColumnUniqueId(UUID.randomUUID().toString)
     shortField.setDimensionColumn(true)
-    shortField.setColumnGroup(10)
     shortField.setSchemaOrdinal(10)
     shortField.setColumnReferenceId(shortField.getColumnUniqueId)
     columnSchemas.add(shortField)
 
     val isCurrentEmployee: ColumnSchema = new ColumnSchema()
     isCurrentEmployee.setColumnName("isCurrentEmployee")
-    isCurrentEmployee.setColumnar(true)
     isCurrentEmployee.setDataType(DataTypes.BOOLEAN)
     isCurrentEmployee.setEncodingList(invertedIndexEncoding)
     isCurrentEmployee.setColumnUniqueId(UUID.randomUUID().toString)
     isCurrentEmployee.setDimensionColumn(false)
-    isCurrentEmployee.setColumnGroup(11)
     isCurrentEmployee.setColumnReferenceId(isCurrentEmployee.getColumnUniqueId)
     columnSchemas.add(isCurrentEmployee)
 
@@ -479,7 +455,6 @@ object CarbonDataStoreCreator {
     CarbonProperties.getInstance
       .addProperty("aggregate.columnar.keyblock", "true")
     CarbonProperties.getInstance.addProperty("is.compressed.keyblock", "false")
-    CarbonProperties.getInstance.addProperty("carbon.leaf.node.size", "120000")
     CarbonProperties.getInstance
       .addProperty("carbon.direct.dictionary", "true")
     val graphPath: String = outPutLoc + File.separator + loadModel.getDatabaseName +

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/integration/spark-common-test/pom.xml
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/pom.xml b/integration/spark-common-test/pom.xml
index a967806..ee87d92 100644
--- a/integration/spark-common-test/pom.xml
+++ b/integration/spark-common-test/pom.xml
@@ -31,6 +31,7 @@
 
   <properties>
     <dev.path>${basedir}/../../dev</dev.path>
+    <jacoco.append>true</jacoco.append>
     <build.directory.projectCommon>../../common/target</build.directory.projectCommon>
     <build.directory.projectCore>../../core/target</build.directory.projectCore>
     <build.directory.projectProcessing>../../processing/target</build.directory.projectProcessing>
@@ -40,8 +41,8 @@
     <build.directory.projectSpark2>../../integration/spark2/target</build.directory.projectSpark2>
     <build.directory.projectSparkCommon>../../integration/spark-common/target</build.directory.projectSparkCommon>
     <build.directory.projectSparkCommonTest>../../integration/spark-common-test/target</build.directory.projectSparkCommonTest>
-    <build.directory.projectHive>../../integration/hive/target</build.directory.projectHive>
-    <build.directory.projectPresto>../../integration/presto/target</build.directory.projectPresto>
+    <!--<build.directory.projectHive>../../integration/hive/target</build.directory.projectHive>-->
+    <!--<build.directory.projectPresto>../../integration/presto/target</build.directory.projectPresto>-->
     <build.directory.projectStoreSdk>../../store/sdk/target</build.directory.projectStoreSdk>
     <build.directory.projectStoreSearch>../../store/search/target</build.directory.projectStoreSearch>
     <build.directory.projectStreaming>../../streaming/target</build.directory.projectStreaming>
@@ -57,8 +58,8 @@
     <classes.directory.projectSpark2>../../integration/spark2/target/classes</classes.directory.projectSpark2>
     <classes.directory.projectSparkCommon>../../integration/spark-common/target/classes</classes.directory.projectSparkCommon>
     <classes.directory.projectSparkCommonTest>../../integration/spark-common-test/target/classes</classes.directory.projectSparkCommonTest>
-    <classes.directory.projectHive>../../integration/hive/target/classes</classes.directory.projectHive>
-    <classes.directory.projectPresto>../../integration/presto/target/classes</classes.directory.projectPresto>
+    <!--<classes.directory.projectHive>../../integration/hive/target/classes</classes.directory.projectHive>-->
+    <!--<classes.directory.projectPresto>../../integration/presto/target/classes</classes.directory.projectPresto>-->
     <classes.directory.projectStoreSdk>../../store/sdk/target/classes</classes.directory.projectStoreSdk>
     <classes.directory.projectStoreSearch>../../store/search/target/classes</classes.directory.projectStoreSearch>
     <classes.directory.projectStreaming>../../streaming/target/classes</classes.directory.projectStreaming>
@@ -76,10 +77,10 @@
     <sources.directory.projectSpark2>../../integration/spark2/src/main/scala</sources.directory.projectSpark2>
     <sources.directory.projectSparkCommon>../../integration/spark-common/src/main/java</sources.directory.projectSparkCommon>
     <sources.directory.projectSparkCommon>../../integration/spark-common/src/main/scala</sources.directory.projectSparkCommon>
-    <sources.directory.projectHive>../../integration/hive/src/main/java</sources.directory.projectHive>
-    <sources.directory.projectHive>../../integration/hive/src/main/scala</sources.directory.projectHive>
-    <sources.directory.projectPresto>../../integration/presto/src/main/java</sources.directory.projectPresto>
-    <sources.directory.projectPresto>../../integration/presto/src/main/scala</sources.directory.projectPresto>
+    <!--<sources.directory.projectHive>../../integration/hive/src/main/java</sources.directory.projectHive>-->
+    <!--<sources.directory.projectHive>../../integration/hive/src/main/scala</sources.directory.projectHive>-->
+    <!--<sources.directory.projectPresto>../../integration/presto/src/main/java</sources.directory.projectPresto>-->
+    <!--<sources.directory.projectPresto>../../integration/presto/src/main/scala</sources.directory.projectPresto>-->
     <sources.directory.projectStoreSdk>../../store/sdk/src/main/java</sources.directory.projectStoreSdk>
     <sources.directory.projectStoreSearch>../../store/search/src/main/java</sources.directory.projectStoreSearch>
     <sources.directory.projectStoreSearch>../../store/search/src/main/scala</sources.directory.projectStoreSearch>
@@ -97,8 +98,8 @@
     <generated-sources.directory.projectSpark2>../../integration/spark2/target/generated-sources/annotations</generated-sources.directory.projectSpark2>
     <generated-sources.directory.projectSparkCommon>../../integration/spark-common/target/generated-sources/annotations</generated-sources.directory.projectSparkCommon>
     <generated-sources.directory.projectSparkCommonTest>../../integration/spark-common-test/target/generated-sources/annotations</generated-sources.directory.projectSparkCommonTest>
-    <generated-sources.directory.projectHive>../../integration/hive/target/generated-sources/annotations</generated-sources.directory.projectHive>
-    <generated-sources.directory.projectPresto>../../integration/presto/target/generated-sources/annotations</generated-sources.directory.projectPresto>
+    <!--<generated-sources.directory.projectHive>../../integration/hive/target/generated-sources/annotations</generated-sources.directory.projectHive>-->
+    <!--<generated-sources.directory.projectPresto>../../integration/presto/target/generated-sources/annotations</generated-sources.directory.projectPresto>-->
     <generated-sources.directory.projectStoreSdk>../../store/sdk/target/generated-sources/annotations</generated-sources.directory.projectStoreSdk>
     <generated-sources.directory.projectStoreSearch>../../store/search/target/generated-sources/annotations</generated-sources.directory.projectStoreSearch>
     <generated-sources.directory.projectStreaming>../../streaming/target/generated-sources/annotations</generated-sources.directory.projectStreaming>
@@ -324,12 +325,12 @@
                     <fileset dir="${build.directory.projectSparkCommonTest}">
                       <include name="jacoco.exec" />
                     </fileset>
-                    <fileset dir="${build.directory.projectHive}" erroronmissingdir="false">
+                    <!--<fileset dir="${build.directory.projectHive}" erroronmissingdir="false">
                       <include name="jacoco.exec" />
-                    </fileset>
-                    <fileset dir="${build.directory.projectPresto}" erroronmissingdir="false">
+                    </fileset>-->
+                    <!--<fileset dir="${build.directory.projectPresto}" erroronmissingdir="false">
                       <include name="jacoco.exec" />
-                    </fileset>
+                    </fileset>-->
                     <fileset dir="${build.directory.projectStoreSdk}" erroronmissingdir="false">
                       <include name="jacoco.exec" />
                     </fileset>
@@ -354,13 +355,13 @@
                         <fileset dir="${classes.directory.projectCore}" />
                         <fileset dir="${classes.directory.projectProcessing}" />
                         <fileset dir="${classes.directory.projectHadoop}" />
-                        <fileset dir="${classes.directory.projectFormat}" erroronmissingdir="false"/>
+                        <!--<fileset dir="${classes.directory.projectFormat}" erroronmissingdir="false"/>-->
                         <fileset dir="${classes.directory.projectSpark}" erroronmissingdir="false"/>
                         <fileset dir="${classes.directory.projectSpark2}" erroronmissingdir="false"/>
                         <fileset dir="${classes.directory.projectSparkCommon}" />
                         <fileset dir="${classes.directory.projectSparkCommonTest}" />
-                        <fileset dir="${classes.directory.projectHive}" erroronmissingdir="false" />
-                        <fileset dir="${classes.directory.projectPresto}" erroronmissingdir="false" />
+                        <!--<fileset dir="${classes.directory.projectHive}" erroronmissingdir="false" />-->
+                        <!--<fileset dir="${classes.directory.projectPresto}" erroronmissingdir="false" />-->
                         <fileset dir="${classes.directory.projectStoreSdk}" erroronmissingdir="false" />
                         <fileset dir="${classes.directory.projectStoreSearch}" erroronmissingdir="false" />
                         <fileset dir="${classes.directory.projectStreaming}" erroronmissingdir="false" />
@@ -372,12 +373,12 @@
                         <fileset dir="${sources.directory.projectCore}" />
                         <fileset dir="${sources.directory.projectProcessing}" />
                         <fileset dir="${sources.directory.projectHadoop}" />
-                        <fileset dir="${sources.directory.projectFormat}" erroronmissingdir="false"/>
+                        <!--<fileset dir="${sources.directory.projectFormat}" erroronmissingdir="false"/>-->
                         <fileset dir="${sources.directory.projectSpark}" erroronmissingdir="false"/>
                         <fileset dir="${sources.directory.projectSpark2}" erroronmissingdir="false"/>
                         <fileset dir="${sources.directory.projectSparkCommon}" />
-                        <fileset dir="${sources.directory.projectHive}" erroronmissingdir="false" />
-                        <fileset dir="${sources.directory.projectPresto}" erroronmissingdir="false" />
+                        <!--<fileset dir="${sources.directory.projectHive}" erroronmissingdir="false" />-->
+                        <!--<fileset dir="${sources.directory.projectPresto}" erroronmissingdir="false" />-->
                         <fileset dir="${sources.directory.projectStoreSdk}" erroronmissingdir="false" />
                         <fileset dir="${sources.directory.projectStoreSearch}" erroronmissingdir="false" />
                         <fileset dir="${sources.directory.projectStreaming}" erroronmissingdir="false" />

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/integration/spark-common-test/src/test/resources/encoding_types.csv
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/resources/encoding_types.csv b/integration/spark-common-test/src/test/resources/encoding_types.csv
new file mode 100644
index 0000000..70a52ee
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/encoding_types.csv
@@ -0,0 +1,3 @@
+begin_time,name,begin_time1,begin_time2,begin_time3,begin_time4,begin_time5,begin_time6,begin_time7,begin_time8,begin_time9,begin_time10,begin_time11,begin_time12,begin_time13,begin_time14,begin_time15,begin_time16,begin_time17,begin_time18,begin_time19,begin_time20
+1497376581,name1,10000,8388600,125,1497376581,8386600,10000,100,125,1497376581,1497423738,2139095000,1497376581,1497423738,32000,123.4,11.1,3200.1,214744460.2,1497376581,1497376581
+1497408581,name2,32000,45000,25,10000,55000,32000,75,35,1497423838,1497423838,2147484000,1497423838,1497423838,31900,838860.7,12.3,127.1,214748360.2,1497408581,1497408581
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/integration/spark-common-test/src/test/resources/short_int_as_target_type.csv
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/resources/short_int_as_target_type.csv b/integration/spark-common-test/src/test/resources/short_int_as_target_type.csv
deleted file mode 100644
index 964517e..0000000
--- a/integration/spark-common-test/src/test/resources/short_int_as_target_type.csv
+++ /dev/null
@@ -1,3 +0,0 @@
-begin_time,name
-1497376581,name1
-1497423838,name2

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/aggquery/IntegerDataTypeTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/aggquery/IntegerDataTypeTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/aggquery/IntegerDataTypeTestCase.scala
index 3eaf0e8..13066eb 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/aggquery/IntegerDataTypeTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/aggquery/IntegerDataTypeTestCase.scala
@@ -112,37 +112,49 @@ class IntegerDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
       """.stripMargin)
   }
 
-  test("short int as target type in deltaIntegerCodec") {
+  test("test all codecs") {
     sql(
       """
-        | DROP TABLE IF EXISTS short_int_target_table
+        | DROP TABLE IF EXISTS all_encoding_table
       """.stripMargin)
 
     //begin_time column will be encoded by deltaIntegerCodec
     sql(
       """
-        | CREATE TABLE short_int_target_table
-        | (begin_time bigint, name string)
+        | CREATE TABLE all_encoding_table
+        | (begin_time bigint, name string,begin_time1 long,begin_time2 long,begin_time3 long,
+        | begin_time4 long,begin_time5 int,begin_time6 int,begin_time7 int,begin_time8 short,
+        | begin_time9 bigint,begin_time10 bigint,begin_time11 bigint,begin_time12 int,
+        | begin_time13 int,begin_time14 short,begin_time15 double,begin_time16 double,
+        | begin_time17 double,begin_time18 double,begin_time19 int,begin_time20 double)
         | STORED BY 'org.apache.carbondata.format'
-      """.stripMargin)
+      """.stripMargin.replaceAll(System.lineSeparator, ""))
 
     sql(
       s"""
-         | LOAD DATA LOCAL INPATH '$resourcesPath/short_int_as_target_type.csv'
-         | INTO TABLE short_int_target_table
+         | LOAD DATA LOCAL INPATH '$resourcesPath/encoding_types.csv'
+         | INTO TABLE all_encoding_table
       """.stripMargin)
 
     checkAnswer(
-      sql("select begin_time from short_int_target_table"),
-      Seq(Row(1497376581), Row(1497423838))
+      sql("select begin_time from all_encoding_table"),
+      sql("select begin_time from all_encoding_table")
+    )
+
+    val ff = BigInt(2147484000L)
+    checkAnswer(
+      sql("select begin_time,begin_time1,begin_time2,begin_time3,begin_time4,begin_time5,begin_time6,begin_time7,begin_time8,begin_time9,begin_time10,begin_time11,begin_time12,begin_time13,begin_time14,begin_time15,begin_time16,begin_time17,begin_time18,begin_time19,begin_time20 from all_encoding_table"),
+      Seq(Row(1497376581,10000,8388600,125,1497376581,8386600,10000,100,125,1497376581,1497423738,2139095000,1497376581,1497423738,32000,123.4,11.1,3200.1,214744460.2,1497376581,1497376581),
+        Row(1497408581,32000,45000,25,10000,55000,32000,75,35,1497423838,1497423838,ff,1497423838,1497423838,31900,838860.7,12.3,127.1,214748360.2,1497408581,1497408581))
     )
 
     sql(
       """
-        | DROP TABLE short_int_target_table
+        | DROP TABLE all_encoding_table
       """.stripMargin)
   }
 
+
   test("Create a table that contains short data type") {
     sql("CREATE TABLE if not exists short_table(col1 short, col2 BOOLEAN) STORED BY 'carbondata'")
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala
index 28ff91a..2d5f05a 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/DoubleDataTypeTest.scala
@@ -35,14 +35,22 @@ class DoubleDataTypeTest extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists carbon_datatype_AdaptiveFloatingCodec_double_sint")
     sql("drop table if exists carbon_datatype_AdaptiveFloatingCodec_double_int")
     sql("drop table if exists carbon_datatype_AdaptiveFloatingCodec_double_long")
+    sql("DROP TABLE IF EXISTS oscon_carbon_old1")
     sql("""create table oscon_carbon_old (CUST_PRFRD_FLG String,PROD_BRAND_NAME String,PROD_COLOR String,CUST_LAST_RVW_DATE String,CUST_COUNTRY String,CUST_CITY String,PRODUCT_NAME String,CUST_JOB_TITLE String,CUST_STATE String,CUST_BUY_POTENTIAL String,PRODUCT_MODEL String,ITM_ID String,ITM_NAME String,PRMTION_ID String,PRMTION_NAME String,SHP_MODE_ID String,SHP_MODE String,DELIVERY_COUNTRY String,DELIVERY_STATE String,DELIVERY_CITY String,DELIVERY_DISTRICT String,ACTIVE_EMUI_VERSION String,WH_NAME String,STR_ORDER_DATE String,OL_ORDER_NO String,OL_ORDER_DATE String,OL_SITE String,CUST_FIRST_NAME String,CUST_LAST_NAME String,CUST_BIRTH_DY String,CUST_BIRTH_MM String,CUST_BIRTH_YR String,CUST_BIRTH_COUNTRY String,CUST_SEX String,CUST_ADDRESS_ID String,CUST_STREET_NO String,CUST_STREET_NAME String,CUST_AGE String,CUST_SUITE_NO String,CUST_ZIP String,CUST_COUNTY String,PRODUCT_ID String,PROD_SHELL_COLOR String,DEVICE_NAME String,PROD_SHORT_DESC String,PROD_LONG_DESC String,PROD_THUMB 
 String,PROD_IMAGE String,PROD_UPDATE_DATE String,PROD_LIVE String,PROD_LOC String,PROD_RAM String,PROD_ROM String,PROD_CPU_CLOCK String,PROD_SERIES String,ITM_REC_START_DATE String,ITM_REC_END_DATE String,ITM_BRAND_ID String,ITM_BRAND String,ITM_CLASS_ID String,ITM_CLASS String,ITM_CATEGORY_ID String,ITM_CATEGORY String,ITM_MANUFACT_ID String,ITM_MANUFACT String,ITM_FORMULATION String,ITM_COLOR String,ITM_CONTAINER String,ITM_MANAGER_ID String,PRM_START_DATE String,PRM_END_DATE String,PRM_CHANNEL_DMAIL String,PRM_CHANNEL_EMAIL String,PRM_CHANNEL_CAT String,PRM_CHANNEL_TV String,PRM_CHANNEL_RADIO String,PRM_CHANNEL_PRESS String,PRM_CHANNEL_EVENT String,PRM_CHANNEL_DEMO String,PRM_CHANNEL_DETAILS String,PRM_PURPOSE String,PRM_DSCNT_ACTIVE String,SHP_CODE String,SHP_CARRIER String,SHP_CONTRACT String,CHECK_DATE String,CHECK_YR String,CHECK_MM String,CHECK_DY String,CHECK_HOUR String,BOM String,INSIDE_NAME String,PACKING_DATE String,PACKING_YR String,PACKING_MM String,PACKING_DY String,
 PACKING_HOUR String,DELIVERY_PROVINCE String,PACKING_LIST_NO String,ACTIVE_CHECK_TIME String,ACTIVE_CHECK_YR String,ACTIVE_CHECK_MM String,ACTIVE_CHECK_DY String,ACTIVE_CHECK_HOUR String,ACTIVE_AREA_ID String,ACTIVE_COUNTRY String,ACTIVE_PROVINCE String,ACTIVE_CITY String,ACTIVE_DISTRICT String,ACTIVE_NETWORK String,ACTIVE_FIRMWARE_VER String,ACTIVE_OS_VERSION String,LATEST_CHECK_TIME String,LATEST_CHECK_YR String,LATEST_CHECK_MM String,LATEST_CHECK_DY String,LATEST_CHECK_HOUR String,LATEST_AREAID String,LATEST_COUNTRY String,LATEST_PROVINCE String,LATEST_CITY String,LATEST_DISTRICT String,LATEST_FIRMWARE_VER String,LATEST_EMUI_VERSION String,LATEST_OS_VERSION String,LATEST_NETWORK String,WH_ID String,WH_STREET_NO String,WH_STREET_NAME String,WH_STREET_TYPE String,WH_SUITE_NO String,WH_CITY String,WH_COUNTY String,WH_STATE String,WH_ZIP String,WH_COUNTRY String,OL_SITE_DESC String,OL_RET_ORDER_NO String,OL_RET_DATE String,PROD_MODEL_ID String,CUST_ID String,PROD_UNQ_MDL_ID String,CU
 ST_NICK_NAME String,CUST_LOGIN String,CUST_EMAIL_ADDR String,PROD_UNQ_DEVICE_ADDR String,PROD_UQ_UUID String,PROD_BAR_CODE String,TRACKING_NO String,STR_ORDER_NO String,CUST_DEP_COUNT double,CUST_VEHICLE_COUNT double,CUST_ADDRESS_CNT double,CUST_CRNT_CDEMO_CNT double,CUST_CRNT_HDEMO_CNT double,CUST_CRNT_ADDR_DM double,CUST_FIRST_SHIPTO_CNT double,CUST_FIRST_SALES_CNT double,CUST_GMT_OFFSET double,CUST_DEMO_CNT double,CUST_INCOME double,PROD_UNLIMITED double,PROD_OFF_PRICE double,PROD_UNITS double,TOTAL_PRD_COST double,TOTAL_PRD_DISC double,PROD_WEIGHT double,REG_UNIT_PRICE double,EXTENDED_AMT double,UNIT_PRICE_DSCNT_PCT double,DSCNT_AMT double,PROD_STD_CST double,TOTAL_TX_AMT double,FREIGHT_CHRG double,WAITING_PERIOD double,DELIVERY_PERIOD double,ITM_CRNT_PRICE double,ITM_UNITS double,ITM_WSLE_CST double,ITM_SIZE double,PRM_CST double,PRM_RESPONSE_TARGET double,PRM_ITM_DM double,SHP_MODE_CNT double,WH_GMT_OFFSET double,WH_SQ_FT double,STR_ORD_QTY double,STR_WSLE_CST double,STR_LIST_
 PRICE double,STR_SALES_PRICE double,STR_EXT_DSCNT_AMT double,STR_EXT_SALES_PRICE double,STR_EXT_WSLE_CST double,STR_EXT_LIST_PRICE double,STR_EXT_TX double,STR_COUPON_AMT double,STR_NET_PAID double,STR_NET_PAID_INC_TX double,STR_NET_PRFT double,STR_SOLD_YR_CNT double,STR_SOLD_MM_CNT double,STR_SOLD_ITM_CNT double,STR_TOTAL_CUST_CNT double,STR_AREA_CNT double,STR_DEMO_CNT double,STR_OFFER_CNT double,STR_PRM_CNT double,STR_TICKET_CNT double,STR_NET_PRFT_DM_A double,STR_NET_PRFT_DM_B double,STR_NET_PRFT_DM_C double,STR_NET_PRFT_DM_D double,STR_NET_PRFT_DM_E double,STR_RET_STR_ID double,STR_RET_REASON_CNT double,STR_RET_TICKET_NO double,STR_RTRN_QTY double,STR_RTRN_AMT double,STR_RTRN_TX double,STR_RTRN_AMT_INC_TX double,STR_RET_FEE double,STR_RTRN_SHIP_CST double,STR_RFNDD_CSH double,STR_REVERSED_CHRG double,STR_STR_CREDIT double,STR_RET_NET_LOSS double,STR_RTRNED_YR_CNT double,STR_RTRN_MM_CNT double,STR_RET_ITM_CNT double,STR_RET_CUST_CNT double,STR_RET_AREA_CNT double,STR_RET_OFFER_C
 NT double,STR_RET_PRM_CNT double,STR_RET_NET_LOSS_DM_A double,STR_RET_NET_LOSS_DM_B double,STR_RET_NET_LOSS_DM_C double,STR_RET_NET_LOSS_DM_D double,OL_ORD_QTY double,OL_WSLE_CST double,OL_LIST_PRICE double,OL_SALES_PRICE double,OL_EXT_DSCNT_AMT double,OL_EXT_SALES_PRICE double,OL_EXT_WSLE_CST double,OL_EXT_LIST_PRICE double,OL_EXT_TX double,OL_COUPON_AMT double,OL_EXT_SHIP_CST double,OL_NET_PAID double,OL_NET_PAID_INC_TX double,OL_NET_PAID_INC_SHIP double,OL_NET_PAID_INC_SHIP_TX double,OL_NET_PRFT double,OL_SOLD_YR_CNT double,OL_SOLD_MM_CNT double,OL_SHIP_DATE_CNT double,OL_ITM_CNT double,OL_BILL_CUST_CNT double,OL_BILL_AREA_CNT double,OL_BILL_DEMO_CNT double,OL_BILL_OFFER_CNT double,OL_SHIP_CUST_CNT double,OL_SHIP_AREA_CNT double,OL_SHIP_DEMO_CNT double,OL_SHIP_OFFER_CNT double,OL_WEB_PAGE_CNT double,OL_WEB_SITE_CNT double,OL_SHIP_MODE_CNT double,OL_WH_CNT double,OL_PRM_CNT double,OL_NET_PRFT_DM_A double,OL_NET_PRFT_DM_B double,OL_NET_PRFT_DM_C double,OL_NET_PRFT_DM_D double,OL_RE
 T_RTRN_QTY double,OL_RTRN_AMT double,OL_RTRN_TX double,OL_RTRN_AMT_INC_TX double,OL_RET_FEE double,OL_RTRN_SHIP_CST double,OL_RFNDD_CSH double,OL_REVERSED_CHRG double,OL_ACCOUNT_CREDIT double,OL_RTRNED_YR_CNT double,OL_RTRNED_MM_CNT double,OL_RTRITM_CNT double,OL_RFNDD_CUST_CNT double,OL_RFNDD_AREA_CNT double,OL_RFNDD_DEMO_CNT double,OL_RFNDD_OFFER_CNT double,OL_RTRNING_CUST_CNT double,OL_RTRNING_AREA_CNT double,OL_RTRNING_DEMO_CNT double,OL_RTRNING_OFFER_CNT double,OL_RTRWEB_PAGE_CNT double,OL_REASON_CNT double,OL_NET_LOSS double,OL_NET_LOSS_DM_A double,OL_NET_LOSS_DM_B double,OL_NET_LOSS_DM_C double) STORED BY 'org.apache.carbondata.format' tblproperties('DICTIONARY_EXCLUDE'='CUST_ID,CUST_NICK_NAME,CUST_FIRST_NAME,CUST_LAST_NAME,CUST_LOGIN,CUST_EMAIL_ADDR,PROD_UNQ_MDL_ID,PROD_UNQ_DEVICE_ADDR,PROD_UQ_UUID,DEVICE_NAME,PROD_BAR_CODE,ITM_ID,ITM_NAME,ITM_BRAND_ID,ITM_BRAND,BOM,PACKING_LIST_NO,TRACKING_NO,ACTIVE_FIRMWARE_VER,LATEST_FIRMWARE_VER,LATEST_EMUI_VERSION,LATEST_NETWORK,STR_ORD
 ER_NO','table_blocksize'='256')""")
     sql(s"""load data LOCAL inpath '$resourcesPath/oscon_10.csv' into table oscon_carbon_old options('DELIMITER'=',', 'QUOTECHAR'='\"','FILEHEADER'='ACTIVE_AREA_ID, ACTIVE_CHECK_DY, ACTIVE_CHECK_HOUR, ACTIVE_CHECK_MM, ACTIVE_CHECK_TIME, ACTIVE_CHECK_YR, ACTIVE_CITY, ACTIVE_COUNTRY, ACTIVE_DISTRICT, ACTIVE_EMUI_VERSION, ACTIVE_FIRMWARE_VER, ACTIVE_NETWORK, ACTIVE_OS_VERSION, ACTIVE_PROVINCE, BOM, CHECK_DATE, CHECK_DY, CHECK_HOUR, CHECK_MM, CHECK_YR, CUST_ADDRESS_ID, CUST_AGE, CUST_BIRTH_COUNTRY, CUST_BIRTH_DY, CUST_BIRTH_MM, CUST_BIRTH_YR, CUST_BUY_POTENTIAL, CUST_CITY, CUST_STATE, CUST_COUNTRY, CUST_COUNTY, CUST_EMAIL_ADDR, CUST_LAST_RVW_DATE, CUST_FIRST_NAME, CUST_ID, CUST_JOB_TITLE, CUST_LAST_NAME, CUST_LOGIN, CUST_NICK_NAME, CUST_PRFRD_FLG, CUST_SEX, CUST_STREET_NAME, CUST_STREET_NO, CUST_SUITE_NO, CUST_ZIP, DELIVERY_CITY, DELIVERY_STATE, DELIVERY_COUNTRY, DELIVERY_DISTRICT, DELIVERY_PROVINCE, DEVICE_NAME, INSIDE_NAME, ITM_BRAND, ITM_BRAND_ID, ITM_CATEGORY, ITM_CATEGORY_ID, ITM_C
 LASS, ITM_CLASS_ID, ITM_COLOR, ITM_CONTAINER, ITM_FORMULATION, ITM_MANAGER_ID, ITM_MANUFACT, ITM_MANUFACT_ID, ITM_ID, ITM_NAME, ITM_REC_END_DATE, ITM_REC_START_DATE, LATEST_AREAID, LATEST_CHECK_DY, LATEST_CHECK_HOUR, LATEST_CHECK_MM, LATEST_CHECK_TIME, LATEST_CHECK_YR, LATEST_CITY, LATEST_COUNTRY, LATEST_DISTRICT, LATEST_EMUI_VERSION, LATEST_FIRMWARE_VER, LATEST_NETWORK, LATEST_OS_VERSION, LATEST_PROVINCE, OL_ORDER_DATE, OL_ORDER_NO, OL_RET_ORDER_NO, OL_RET_DATE, OL_SITE, OL_SITE_DESC, PACKING_DATE, PACKING_DY, PACKING_HOUR, PACKING_LIST_NO, PACKING_MM, PACKING_YR, PRMTION_ID, PRMTION_NAME, PRM_CHANNEL_CAT, PRM_CHANNEL_DEMO, PRM_CHANNEL_DETAILS, PRM_CHANNEL_DMAIL, PRM_CHANNEL_EMAIL, PRM_CHANNEL_EVENT, PRM_CHANNEL_PRESS, PRM_CHANNEL_RADIO, PRM_CHANNEL_TV, PRM_DSCNT_ACTIVE, PRM_END_DATE, PRM_PURPOSE, PRM_START_DATE, PRODUCT_ID, PROD_BAR_CODE, PROD_BRAND_NAME, PRODUCT_NAME, PRODUCT_MODEL, PROD_MODEL_ID, PROD_COLOR, PROD_SHELL_COLOR, PROD_CPU_CLOCK, PROD_IMAGE, PROD_LIVE, PROD_LOC, PROD
 _LONG_DESC, PROD_RAM, PROD_ROM, PROD_SERIES, PROD_SHORT_DESC, PROD_THUMB, PROD_UNQ_DEVICE_ADDR, PROD_UNQ_MDL_ID, PROD_UPDATE_DATE, PROD_UQ_UUID, SHP_CARRIER, SHP_CODE, SHP_CONTRACT, SHP_MODE_ID, SHP_MODE, STR_ORDER_DATE, STR_ORDER_NO, TRACKING_NO, WH_CITY, WH_COUNTRY, WH_COUNTY, WH_ID, WH_NAME, WH_STATE, WH_STREET_NAME, WH_STREET_NO, WH_STREET_TYPE, WH_SUITE_NO, WH_ZIP, CUST_DEP_COUNT, CUST_VEHICLE_COUNT, CUST_ADDRESS_CNT, CUST_CRNT_CDEMO_CNT, CUST_CRNT_HDEMO_CNT, CUST_CRNT_ADDR_DM, CUST_FIRST_SHIPTO_CNT, CUST_FIRST_SALES_CNT, CUST_GMT_OFFSET, CUST_DEMO_CNT, CUST_INCOME, PROD_UNLIMITED, PROD_OFF_PRICE, PROD_UNITS, TOTAL_PRD_COST, TOTAL_PRD_DISC, PROD_WEIGHT, REG_UNIT_PRICE, EXTENDED_AMT, UNIT_PRICE_DSCNT_PCT, DSCNT_AMT, PROD_STD_CST, TOTAL_TX_AMT, FREIGHT_CHRG, WAITING_PERIOD, DELIVERY_PERIOD, ITM_CRNT_PRICE, ITM_UNITS, ITM_WSLE_CST, ITM_SIZE, PRM_CST, PRM_RESPONSE_TARGET, PRM_ITM_DM, SHP_MODE_CNT, WH_GMT_OFFSET, WH_SQ_FT, STR_ORD_QTY, STR_WSLE_CST, STR_LIST_PRICE, STR_SALES_PRICE, 
 STR_EXT_DSCNT_AMT, STR_EXT_SALES_PRICE, STR_EXT_WSLE_CST, STR_EXT_LIST_PRICE, STR_EXT_TX, STR_COUPON_AMT, STR_NET_PAID, STR_NET_PAID_INC_TX, STR_NET_PRFT, STR_SOLD_YR_CNT, STR_SOLD_MM_CNT, STR_SOLD_ITM_CNT, STR_TOTAL_CUST_CNT, STR_AREA_CNT, STR_DEMO_CNT, STR_OFFER_CNT, STR_PRM_CNT, STR_TICKET_CNT, STR_NET_PRFT_DM_A, STR_NET_PRFT_DM_B, STR_NET_PRFT_DM_C, STR_NET_PRFT_DM_D, STR_NET_PRFT_DM_E, STR_RET_STR_ID, STR_RET_REASON_CNT, STR_RET_TICKET_NO, STR_RTRN_QTY, STR_RTRN_AMT, STR_RTRN_TX, STR_RTRN_AMT_INC_TX, STR_RET_FEE, STR_RTRN_SHIP_CST, STR_RFNDD_CSH, STR_REVERSED_CHRG, STR_STR_CREDIT, STR_RET_NET_LOSS, STR_RTRNED_YR_CNT, STR_RTRN_MM_CNT, STR_RET_ITM_CNT, STR_RET_CUST_CNT, STR_RET_AREA_CNT, STR_RET_OFFER_CNT, STR_RET_PRM_CNT, STR_RET_NET_LOSS_DM_A, STR_RET_NET_LOSS_DM_B, STR_RET_NET_LOSS_DM_C, STR_RET_NET_LOSS_DM_D, OL_ORD_QTY, OL_WSLE_CST, OL_LIST_PRICE, OL_SALES_PRICE, OL_EXT_DSCNT_AMT, OL_EXT_SALES_PRICE, OL_EXT_WSLE_CST, OL_EXT_LIST_PRICE, OL_EXT_TX, OL_COUPON_AMT, OL_EXT_SHIP_C
 ST, OL_NET_PAID, OL_NET_PAID_INC_TX, OL_NET_PAID_INC_SHIP, OL_NET_PAID_INC_SHIP_TX, OL_NET_PRFT, OL_SOLD_YR_CNT, OL_SOLD_MM_CNT, OL_SHIP_DATE_CNT, OL_ITM_CNT, OL_BILL_CUST_CNT, OL_BILL_AREA_CNT, OL_BILL_DEMO_CNT, OL_BILL_OFFER_CNT, OL_SHIP_CUST_CNT, OL_SHIP_AREA_CNT, OL_SHIP_DEMO_CNT, OL_SHIP_OFFER_CNT, OL_WEB_PAGE_CNT, OL_WEB_SITE_CNT, OL_SHIP_MODE_CNT, OL_WH_CNT, OL_PRM_CNT, OL_NET_PRFT_DM_A, OL_NET_PRFT_DM_B, OL_NET_PRFT_DM_C, OL_NET_PRFT_DM_D, OL_RET_RTRN_QTY, OL_RTRN_AMT, OL_RTRN_TX, OL_RTRN_AMT_INC_TX, OL_RET_FEE, OL_RTRN_SHIP_CST, OL_RFNDD_CSH, OL_REVERSED_CHRG, OL_ACCOUNT_CREDIT, OL_RTRNED_YR_CNT, OL_RTRNED_MM_CNT, OL_RTRITM_CNT, OL_RFNDD_CUST_CNT, OL_RFNDD_AREA_CNT, OL_RFNDD_DEMO_CNT, OL_RFNDD_OFFER_CNT, OL_RTRNING_CUST_CNT, OL_RTRNING_AREA_CNT, OL_RTRNING_DEMO_CNT, OL_RTRNING_OFFER_CNT, OL_RTRWEB_PAGE_CNT, OL_REASON_CNT, OL_NET_LOSS, OL_NET_LOSS_DM_A, OL_NET_LOSS_DM_B, OL_NET_LOSS_DM_C','BAD_RECORDS_ACTION'='FORCE','BAD_RECORDS_LOGGER_ENABLE'='FALSE')""")
+    sql(
+      """create table oscon_carbon_old1 (CUST_PRFRD_FLG String,PROD_BRAND_NAME String,PROD_COLOR String,CUST_LAST_RVW_DATE String,CUST_COUNTRY String,CUST_CITY String,PRODUCT_NAME String,CUST_JOB_TITLE String,CUST_STATE String,CUST_BUY_POTENTIAL String,PRODUCT_MODEL String,ITM_ID String,ITM_NAME String,PRMTION_ID String,PRMTION_NAME String,SHP_MODE_ID String,SHP_MODE String,DELIVERY_COUNTRY String,DELIVERY_STATE String,DELIVERY_CITY String,DELIVERY_DISTRICT String,ACTIVE_EMUI_VERSION String,WH_NAME String,STR_ORDER_DATE String,OL_ORDER_NO String,OL_ORDER_DATE String,OL_SITE String,CUST_FIRST_NAME String,CUST_LAST_NAME String,CUST_BIRTH_DY String,CUST_BIRTH_MM String,CUST_BIRTH_YR String,CUST_BIRTH_COUNTRY String,CUST_SEX String,CUST_ADDRESS_ID String,CUST_STREET_NO String,CUST_STREET_NAME String,CUST_AGE String,CUST_SUITE_NO String,CUST_ZIP String,CUST_COUNTY String,PRODUCT_ID String,PROD_SHELL_COLOR String,DEVICE_NAME String,PROD_SHORT_DESC String,PROD_LONG_DESC String,PROD_THUMB S
 tring,PROD_IMAGE String,PROD_UPDATE_DATE String,PROD_LIVE String,PROD_LOC String,PROD_RAM String,PROD_ROM String,PROD_CPU_CLOCK String,PROD_SERIES String,ITM_REC_START_DATE String,ITM_REC_END_DATE String,ITM_BRAND_ID String,ITM_BRAND String,ITM_CLASS_ID String,ITM_CLASS String,ITM_CATEGORY_ID String,ITM_CATEGORY String,ITM_MANUFACT_ID String,ITM_MANUFACT String,ITM_FORMULATION String,ITM_COLOR String,ITM_CONTAINER String,ITM_MANAGER_ID String,PRM_START_DATE String,PRM_END_DATE String,PRM_CHANNEL_DMAIL String,PRM_CHANNEL_EMAIL String,PRM_CHANNEL_CAT String,PRM_CHANNEL_TV String,PRM_CHANNEL_RADIO String,PRM_CHANNEL_PRESS String,PRM_CHANNEL_EVENT String,PRM_CHANNEL_DEMO String,PRM_CHANNEL_DETAILS String,PRM_PURPOSE String,PRM_DSCNT_ACTIVE String,SHP_CODE String,SHP_CARRIER String,SHP_CONTRACT String,CHECK_DATE String,CHECK_YR String,CHECK_MM String,CHECK_DY String,CHECK_HOUR String,BOM String,INSIDE_NAME String,PACKING_DATE String,PACKING_YR String,PACKING_MM String,PACKING_DY String,P
 ACKING_HOUR String,DELIVERY_PROVINCE String,PACKING_LIST_NO String,ACTIVE_CHECK_TIME String,ACTIVE_CHECK_YR String,ACTIVE_CHECK_MM String,ACTIVE_CHECK_DY String,ACTIVE_CHECK_HOUR String,ACTIVE_AREA_ID String,ACTIVE_COUNTRY String,ACTIVE_PROVINCE String,ACTIVE_CITY String,ACTIVE_DISTRICT String,ACTIVE_NETWORK String,ACTIVE_FIRMWARE_VER String,ACTIVE_OS_VERSION String,LATEST_CHECK_TIME String,LATEST_CHECK_YR String,LATEST_CHECK_MM String,LATEST_CHECK_DY String,LATEST_CHECK_HOUR String,LATEST_AREAID String,LATEST_COUNTRY String,LATEST_PROVINCE String,LATEST_CITY String,LATEST_DISTRICT String,LATEST_FIRMWARE_VER String,LATEST_EMUI_VERSION String,LATEST_OS_VERSION String,LATEST_NETWORK String,WH_ID String,WH_STREET_NO String,WH_STREET_NAME String,WH_STREET_TYPE String,WH_SUITE_NO String,WH_CITY String,WH_COUNTY String,WH_STATE String,WH_ZIP String,WH_COUNTRY String,OL_SITE_DESC String,OL_RET_ORDER_NO String,OL_RET_DATE String,PROD_MODEL_ID String,CUST_ID String,PROD_UNQ_MDL_ID String,CUS
 T_NICK_NAME String,CUST_LOGIN String,CUST_EMAIL_ADDR String,PROD_UNQ_DEVICE_ADDR String,PROD_UQ_UUID String,PROD_BAR_CODE String,TRACKING_NO String,STR_ORDER_NO String,CUST_DEP_COUNT double,CUST_VEHICLE_COUNT double,CUST_ADDRESS_CNT double,CUST_CRNT_CDEMO_CNT double,CUST_CRNT_HDEMO_CNT double,CUST_CRNT_ADDR_DM double,CUST_FIRST_SHIPTO_CNT double,CUST_FIRST_SALES_CNT double,CUST_GMT_OFFSET double,CUST_DEMO_CNT double,CUST_INCOME double,PROD_UNLIMITED double,PROD_OFF_PRICE double,PROD_UNITS double,TOTAL_PRD_COST double,TOTAL_PRD_DISC double,PROD_WEIGHT double,REG_UNIT_PRICE double,EXTENDED_AMT double,UNIT_PRICE_DSCNT_PCT double,DSCNT_AMT double,PROD_STD_CST double,TOTAL_TX_AMT double,FREIGHT_CHRG double,WAITING_PERIOD double,DELIVERY_PERIOD double,ITM_CRNT_PRICE double,ITM_UNITS double,ITM_WSLE_CST double,ITM_SIZE double,PRM_CST double,PRM_RESPONSE_TARGET double,PRM_ITM_DM double,SHP_MODE_CNT double,WH_GMT_OFFSET double,WH_SQ_FT double,STR_ORD_QTY double,STR_WSLE_CST double,STR_LIST_P
 RICE double,STR_SALES_PRICE double,STR_EXT_DSCNT_AMT double,STR_EXT_SALES_PRICE double,STR_EXT_WSLE_CST double,STR_EXT_LIST_PRICE double,STR_EXT_TX double,STR_COUPON_AMT double,STR_NET_PAID double,STR_NET_PAID_INC_TX double,STR_NET_PRFT double,STR_SOLD_YR_CNT double,STR_SOLD_MM_CNT double,STR_SOLD_ITM_CNT double,STR_TOTAL_CUST_CNT double,STR_AREA_CNT double,STR_DEMO_CNT double,STR_OFFER_CNT double,STR_PRM_CNT double,STR_TICKET_CNT double,STR_NET_PRFT_DM_A double,STR_NET_PRFT_DM_B double,STR_NET_PRFT_DM_C double,STR_NET_PRFT_DM_D double,STR_NET_PRFT_DM_E double,STR_RET_STR_ID double,STR_RET_REASON_CNT double,STR_RET_TICKET_NO double,STR_RTRN_QTY double,STR_RTRN_AMT double,STR_RTRN_TX double,STR_RTRN_AMT_INC_TX double,STR_RET_FEE double,STR_RTRN_SHIP_CST double,STR_RFNDD_CSH double,STR_REVERSED_CHRG double,STR_STR_CREDIT double,STR_RET_NET_LOSS double,STR_RTRNED_YR_CNT double,STR_RTRN_MM_CNT double,STR_RET_ITM_CNT double,STR_RET_CUST_CNT double,STR_RET_AREA_CNT double,STR_RET_OFFER_CN
 T double,STR_RET_PRM_CNT double,STR_RET_NET_LOSS_DM_A double,STR_RET_NET_LOSS_DM_B double,STR_RET_NET_LOSS_DM_C double,STR_RET_NET_LOSS_DM_D double,OL_ORD_QTY double,OL_WSLE_CST double,OL_LIST_PRICE double,OL_SALES_PRICE double,OL_EXT_DSCNT_AMT double,OL_EXT_SALES_PRICE double,OL_EXT_WSLE_CST double,OL_EXT_LIST_PRICE double,OL_EXT_TX double,OL_COUPON_AMT double,OL_EXT_SHIP_CST double,OL_NET_PAID double,OL_NET_PAID_INC_TX double,OL_NET_PAID_INC_SHIP double,OL_NET_PAID_INC_SHIP_TX double,OL_NET_PRFT double,OL_SOLD_YR_CNT double,OL_SOLD_MM_CNT double,OL_SHIP_DATE_CNT double,OL_ITM_CNT double,OL_BILL_CUST_CNT double,OL_BILL_AREA_CNT double,OL_BILL_DEMO_CNT double,OL_BILL_OFFER_CNT double,OL_SHIP_CUST_CNT double,OL_SHIP_AREA_CNT double,OL_SHIP_DEMO_CNT double,OL_SHIP_OFFER_CNT double,OL_WEB_PAGE_CNT double,OL_WEB_SITE_CNT double,OL_SHIP_MODE_CNT double,OL_WH_CNT double,OL_PRM_CNT double,OL_NET_PRFT_DM_A double,OL_NET_PRFT_DM_B double,OL_NET_PRFT_DM_C double,OL_NET_PRFT_DM_D double,OL_RET
 _RTRN_QTY double,OL_RTRN_AMT double,OL_RTRN_TX double,OL_RTRN_AMT_INC_TX double,OL_RET_FEE double,OL_RTRN_SHIP_CST double,OL_RFNDD_CSH double,OL_REVERSED_CHRG double,OL_ACCOUNT_CREDIT double,OL_RTRNED_YR_CNT double,OL_RTRNED_MM_CNT double,OL_RTRITM_CNT double,OL_RFNDD_CUST_CNT double,OL_RFNDD_AREA_CNT double,OL_RFNDD_DEMO_CNT double,OL_RFNDD_OFFER_CNT double,OL_RTRNING_CUST_CNT double,OL_RTRNING_AREA_CNT double,OL_RTRNING_DEMO_CNT double,OL_RTRNING_OFFER_CNT double,OL_RTRWEB_PAGE_CNT double,OL_REASON_CNT double,OL_NET_LOSS double,OL_NET_LOSS_DM_A double,OL_NET_LOSS_DM_B double,OL_NET_LOSS_DM_C double) STORED BY 'org.apache.carbondata.format' tblproperties('DICTIONARY_INCLUDE'='CUST_DEP_COUNT','DICTIONARY_EXCLUDE'='CUST_ID,CUST_NICK_NAME,CUST_FIRST_NAME,CUST_LAST_NAME,CUST_LOGIN,CUST_EMAIL_ADDR,PROD_UNQ_MDL_ID,PROD_UNQ_DEVICE_ADDR,PROD_UQ_UUID,DEVICE_NAME,PROD_BAR_CODE,ITM_ID,ITM_NAME,ITM_BRAND_ID,ITM_BRAND,BOM,PACKING_LIST_NO,TRACKING_NO,ACTIVE_FIRMWARE_VER,LATEST_FIRMWARE_VER,LATES
 T_EMUI_VERSION,LATEST_NETWORK,STR_ORDER_NO','table_blocksize'='256')""".stripMargin)
+    sql(s"""load data LOCAL inpath '$resourcesPath/oscon_10.csv' into table oscon_carbon_old1 options('DELIMITER'=',', 'QUOTECHAR'='\"','FILEHEADER'='ACTIVE_AREA_ID, ACTIVE_CHECK_DY, ACTIVE_CHECK_HOUR, ACTIVE_CHECK_MM, ACTIVE_CHECK_TIME, ACTIVE_CHECK_YR, ACTIVE_CITY, ACTIVE_COUNTRY, ACTIVE_DISTRICT, ACTIVE_EMUI_VERSION, ACTIVE_FIRMWARE_VER, ACTIVE_NETWORK, ACTIVE_OS_VERSION, ACTIVE_PROVINCE, BOM, CHECK_DATE, CHECK_DY, CHECK_HOUR, CHECK_MM, CHECK_YR, CUST_ADDRESS_ID, CUST_AGE, CUST_BIRTH_COUNTRY, CUST_BIRTH_DY, CUST_BIRTH_MM, CUST_BIRTH_YR, CUST_BUY_POTENTIAL, CUST_CITY, CUST_STATE, CUST_COUNTRY, CUST_COUNTY, CUST_EMAIL_ADDR, CUST_LAST_RVW_DATE, CUST_FIRST_NAME, CUST_ID, CUST_JOB_TITLE, CUST_LAST_NAME, CUST_LOGIN, CUST_NICK_NAME, CUST_PRFRD_FLG, CUST_SEX, CUST_STREET_NAME, CUST_STREET_NO, CUST_SUITE_NO, CUST_ZIP, DELIVERY_CITY, DELIVERY_STATE, DELIVERY_COUNTRY, DELIVERY_DISTRICT, DELIVERY_PROVINCE, DEVICE_NAME, INSIDE_NAME, ITM_BRAND, ITM_BRAND_ID, ITM_CATEGORY, ITM_CATEGORY_ID, ITM_
 CLASS, ITM_CLASS_ID, ITM_COLOR, ITM_CONTAINER, ITM_FORMULATION, ITM_MANAGER_ID, ITM_MANUFACT, ITM_MANUFACT_ID, ITM_ID, ITM_NAME, ITM_REC_END_DATE, ITM_REC_START_DATE, LATEST_AREAID, LATEST_CHECK_DY, LATEST_CHECK_HOUR, LATEST_CHECK_MM, LATEST_CHECK_TIME, LATEST_CHECK_YR, LATEST_CITY, LATEST_COUNTRY, LATEST_DISTRICT, LATEST_EMUI_VERSION, LATEST_FIRMWARE_VER, LATEST_NETWORK, LATEST_OS_VERSION, LATEST_PROVINCE, OL_ORDER_DATE, OL_ORDER_NO, OL_RET_ORDER_NO, OL_RET_DATE, OL_SITE, OL_SITE_DESC, PACKING_DATE, PACKING_DY, PACKING_HOUR, PACKING_LIST_NO, PACKING_MM, PACKING_YR, PRMTION_ID, PRMTION_NAME, PRM_CHANNEL_CAT, PRM_CHANNEL_DEMO, PRM_CHANNEL_DETAILS, PRM_CHANNEL_DMAIL, PRM_CHANNEL_EMAIL, PRM_CHANNEL_EVENT, PRM_CHANNEL_PRESS, PRM_CHANNEL_RADIO, PRM_CHANNEL_TV, PRM_DSCNT_ACTIVE, PRM_END_DATE, PRM_PURPOSE, PRM_START_DATE, PRODUCT_ID, PROD_BAR_CODE, PROD_BRAND_NAME, PRODUCT_NAME, PRODUCT_MODEL, PROD_MODEL_ID, PROD_COLOR, PROD_SHELL_COLOR, PROD_CPU_CLOCK, PROD_IMAGE, PROD_LIVE, PROD_LOC, PRO
 D_LONG_DESC, PROD_RAM, PROD_ROM, PROD_SERIES, PROD_SHORT_DESC, PROD_THUMB, PROD_UNQ_DEVICE_ADDR, PROD_UNQ_MDL_ID, PROD_UPDATE_DATE, PROD_UQ_UUID, SHP_CARRIER, SHP_CODE, SHP_CONTRACT, SHP_MODE_ID, SHP_MODE, STR_ORDER_DATE, STR_ORDER_NO, TRACKING_NO, WH_CITY, WH_COUNTRY, WH_COUNTY, WH_ID, WH_NAME, WH_STATE, WH_STREET_NAME, WH_STREET_NO, WH_STREET_TYPE, WH_SUITE_NO, WH_ZIP, CUST_DEP_COUNT, CUST_VEHICLE_COUNT, CUST_ADDRESS_CNT, CUST_CRNT_CDEMO_CNT, CUST_CRNT_HDEMO_CNT, CUST_CRNT_ADDR_DM, CUST_FIRST_SHIPTO_CNT, CUST_FIRST_SALES_CNT, CUST_GMT_OFFSET, CUST_DEMO_CNT, CUST_INCOME, PROD_UNLIMITED, PROD_OFF_PRICE, PROD_UNITS, TOTAL_PRD_COST, TOTAL_PRD_DISC, PROD_WEIGHT, REG_UNIT_PRICE, EXTENDED_AMT, UNIT_PRICE_DSCNT_PCT, DSCNT_AMT, PROD_STD_CST, TOTAL_TX_AMT, FREIGHT_CHRG, WAITING_PERIOD, DELIVERY_PERIOD, ITM_CRNT_PRICE, ITM_UNITS, ITM_WSLE_CST, ITM_SIZE, PRM_CST, PRM_RESPONSE_TARGET, PRM_ITM_DM, SHP_MODE_CNT, WH_GMT_OFFSET, WH_SQ_FT, STR_ORD_QTY, STR_WSLE_CST, STR_LIST_PRICE, STR_SALES_PRICE,
  STR_EXT_DSCNT_AMT, STR_EXT_SALES_PRICE, STR_EXT_WSLE_CST, STR_EXT_LIST_PRICE, STR_EXT_TX, STR_COUPON_AMT, STR_NET_PAID, STR_NET_PAID_INC_TX, STR_NET_PRFT, STR_SOLD_YR_CNT, STR_SOLD_MM_CNT, STR_SOLD_ITM_CNT, STR_TOTAL_CUST_CNT, STR_AREA_CNT, STR_DEMO_CNT, STR_OFFER_CNT, STR_PRM_CNT, STR_TICKET_CNT, STR_NET_PRFT_DM_A, STR_NET_PRFT_DM_B, STR_NET_PRFT_DM_C, STR_NET_PRFT_DM_D, STR_NET_PRFT_DM_E, STR_RET_STR_ID, STR_RET_REASON_CNT, STR_RET_TICKET_NO, STR_RTRN_QTY, STR_RTRN_AMT, STR_RTRN_TX, STR_RTRN_AMT_INC_TX, STR_RET_FEE, STR_RTRN_SHIP_CST, STR_RFNDD_CSH, STR_REVERSED_CHRG, STR_STR_CREDIT, STR_RET_NET_LOSS, STR_RTRNED_YR_CNT, STR_RTRN_MM_CNT, STR_RET_ITM_CNT, STR_RET_CUST_CNT, STR_RET_AREA_CNT, STR_RET_OFFER_CNT, STR_RET_PRM_CNT, STR_RET_NET_LOSS_DM_A, STR_RET_NET_LOSS_DM_B, STR_RET_NET_LOSS_DM_C, STR_RET_NET_LOSS_DM_D, OL_ORD_QTY, OL_WSLE_CST, OL_LIST_PRICE, OL_SALES_PRICE, OL_EXT_DSCNT_AMT, OL_EXT_SALES_PRICE, OL_EXT_WSLE_CST, OL_EXT_LIST_PRICE, OL_EXT_TX, OL_COUPON_AMT, OL_EXT_SHIP_
 CST, OL_NET_PAID, OL_NET_PAID_INC_TX, OL_NET_PAID_INC_SHIP, OL_NET_PAID_INC_SHIP_TX, OL_NET_PRFT, OL_SOLD_YR_CNT, OL_SOLD_MM_CNT, OL_SHIP_DATE_CNT, OL_ITM_CNT, OL_BILL_CUST_CNT, OL_BILL_AREA_CNT, OL_BILL_DEMO_CNT, OL_BILL_OFFER_CNT, OL_SHIP_CUST_CNT, OL_SHIP_AREA_CNT, OL_SHIP_DEMO_CNT, OL_SHIP_OFFER_CNT, OL_WEB_PAGE_CNT, OL_WEB_SITE_CNT, OL_SHIP_MODE_CNT, OL_WH_CNT, OL_PRM_CNT, OL_NET_PRFT_DM_A, OL_NET_PRFT_DM_B, OL_NET_PRFT_DM_C, OL_NET_PRFT_DM_D, OL_RET_RTRN_QTY, OL_RTRN_AMT, OL_RTRN_TX, OL_RTRN_AMT_INC_TX, OL_RET_FEE, OL_RTRN_SHIP_CST, OL_RFNDD_CSH, OL_REVERSED_CHRG, OL_ACCOUNT_CREDIT, OL_RTRNED_YR_CNT, OL_RTRNED_MM_CNT, OL_RTRITM_CNT, OL_RFNDD_CUST_CNT, OL_RFNDD_AREA_CNT, OL_RFNDD_DEMO_CNT, OL_RFNDD_OFFER_CNT, OL_RTRNING_CUST_CNT, OL_RTRNING_AREA_CNT, OL_RTRNING_DEMO_CNT, OL_RTRNING_OFFER_CNT, OL_RTRWEB_PAGE_CNT, OL_REASON_CNT, OL_NET_LOSS, OL_NET_LOSS_DM_A, OL_NET_LOSS_DM_B, OL_NET_LOSS_DM_C','BAD_RECORDS_ACTION'='FORCE','BAD_RECORDS_LOGGER_ENABLE'='FALSE')""")
   }
 
   test("test to check result for double data type") {
     val result = sql("select OL_SALES_PRICE from oscon_carbon_old limit 10").count()
     assert(result.equals(10L))
   }
+  test("test to check result for double data type as dimension") {
+    val result = sql("select CUST_DEP_COUNT from oscon_carbon_old1 limit 10").count()
+    assert(result.equals(10L))
+  }
 
   test("Double Datatype Check with AdaptiveDeltaFloating Codec (BYTE)") {
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
index 937aee9..0dba467 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/bigdecimal/TestBigDecimal.scala
@@ -49,6 +49,9 @@ class TestBigDecimal extends QueryTest with BeforeAndAfterAll {
     sql(s"LOAD DATA local inpath '$resourcesPath/decimalBoundaryDataHive.csv' INTO table hiveBigDecimal")
     sql("create table if not exists carbonBigDecimal_2 (ID Int, date Timestamp, country String, name String, phonetype String, serialname String, salary decimal(30, 10)) STORED BY 'org.apache.carbondata.format'")
     sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/decimalBoundaryDataCarbon.csv' into table carbonBigDecimal_2")
+
+    sql("create table if not exists carbonBigDecimal_3 (ID Int, date Timestamp, country String,name String, phonetype String, serialname String, salary decimal(30, 2)) STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('dictionary_include'='salary')")
+    sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/decimalBoundaryDataCarbon.csv' into table carbonBigDecimal_3")
   }
 
   test("test detail query on big decimal column") {
@@ -188,6 +191,11 @@ class TestBigDecimal extends QueryTest with BeforeAndAfterAll {
       sql("select avg(salary)/10 from hiveBigDecimal"))
   }
 
+  test("test lower precision definition on big decimal column with high precision value") {
+    checkAnswer(sql("select count(salary) from carbonBigDecimal_3"),
+      sql("select count(salary) from hiveBigDecimal"))
+  }
+
   test("test decimal compression where both precision and data falls in integer range") {
     sql("create table decimal_int_test(d1 decimal(9,3)) stored by 'carbondata'")
     sql(s"load data inpath '$resourcesPath/decimal_int_range.csv' into table decimal_int_test")
@@ -204,6 +212,7 @@ class TestBigDecimal extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists hiveBigDecimal")
     sql("drop table if exists carbonBigDecimal_2")
     sql("DROP TABLE IF EXISTS decimal_int_test")
+    sql("drop table if exists carbonBigDecimal_3")
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.SORT_SIZE,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
index 5ab156c..b483594 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datacompaction/MajorCompactionIgnoreInMinorTest.scala
@@ -21,13 +21,9 @@ import scala.collection.JavaConverters._
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.datastore.block.SegmentTaskIndexWrapper
-import org.apache.carbondata.core.datastore.TableSegmentUniqueIdentifier
 import org.apache.carbondata.core.metadata.CarbonMetadata
 import org.apache.carbondata.core.statusmanager.{SegmentStatus, SegmentStatusManager}
 import org.apache.carbondata.core.util.CarbonProperties
-import org.apache.carbondata.core.util.path.CarbonTablePath
-import org.apache.carbondata.hadoop.CacheClient
 import org.apache.spark.sql.test.util.QueryTest
 
 /**
@@ -138,11 +134,6 @@ class MajorCompactionIgnoreInMinorTest extends QueryTest with BeforeAndAfterAll
     assert(segments.contains("2.1"))
     assert(!segments.contains("2"))
     assert(!segments.contains("3"))
-    val cacheClient = new CacheClient()
-    val segmentIdentifier = new TableSegmentUniqueIdentifier(absoluteTableIdentifier, "2")
-    val wrapper: SegmentTaskIndexWrapper = cacheClient.getSegmentAccessClient.
-      getIfPresent(segmentIdentifier)
-    assert(null == wrapper)
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
index f4c12f7..9866683 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/datamap/FGDataMapTestCase.scala
@@ -491,6 +491,8 @@ class FGDataMapTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"LOAD DATA LOCAL INPATH '$file2' INTO TABLE datamap_test OPTIONS('header'='false')")
     checkAnswer(sql("select * from datamap_test where name='n502670' and city='c2670'"),
       sql("select * from normal_test where name='n502670' and city='c2670'"))
+    checkAnswer(sql("select * from datamap_test where name='n502670' or city='c2670'"),
+      sql("select * from normal_test where name='n502670' or city='c2670'"))
   }
 
   test("test invisible datamap during query") {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/f9114036/integration/spark-common/pom.xml
----------------------------------------------------------------------
diff --git a/integration/spark-common/pom.xml b/integration/spark-common/pom.xml
index 599c6c8..0bac9f7 100644
--- a/integration/spark-common/pom.xml
+++ b/integration/spark-common/pom.xml
@@ -31,6 +31,7 @@
 
   <properties>
     <dev.path>${basedir}/../../dev</dev.path>
+    <jacoco.append>true</jacoco.append>
   </properties>
 
   <dependencies>


Mime
View raw message