carbondata-commits mailing list archives

From chenliang...@apache.org
Subject [50/52] [partial] incubator-carbondata git commit: move core package
Date Mon, 16 Jan 2017 14:53:27 GMT
http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
index cd2a88c..980970c 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/ColumnDictionaryInfo.java
@@ -26,8 +26,8 @@ import java.util.Date;
 import java.util.List;
 import java.util.concurrent.atomic.AtomicReference;
 
-import org.apache.carbondata.core.carbon.metadata.datatype.DataType;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.util.ByteUtil;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonUtil;

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryCacheLoader.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryCacheLoader.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryCacheLoader.java
index 8da437b..59a93f3 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryCacheLoader.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryCacheLoader.java
@@ -21,7 +21,7 @@ package org.apache.carbondata.core.cache.dictionary;
 
 import java.io.IOException;
 
-import org.apache.carbondata.core.carbon.ColumnIdentifier;
+import org.apache.carbondata.core.metadata.ColumnIdentifier;
 
 public interface DictionaryCacheLoader {
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryCacheLoaderImpl.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryCacheLoaderImpl.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryCacheLoaderImpl.java
index ce3107b..6e2d899 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryCacheLoaderImpl.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryCacheLoaderImpl.java
@@ -24,12 +24,12 @@ import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 
-import org.apache.carbondata.common.factory.CarbonCommonFactory;
-import org.apache.carbondata.core.carbon.CarbonTableIdentifier;
-import org.apache.carbondata.core.carbon.ColumnIdentifier;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
+import org.apache.carbondata.core.metadata.ColumnIdentifier;
 import org.apache.carbondata.core.reader.CarbonDictionaryReader;
 import org.apache.carbondata.core.reader.sortindex.CarbonDictionarySortIndexReader;
+import org.apache.carbondata.core.service.CarbonCommonFactory;
 import org.apache.carbondata.core.service.DictionaryService;
 import org.apache.carbondata.core.util.CarbonUtil;
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryColumnUniqueIdentifier.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryColumnUniqueIdentifier.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryColumnUniqueIdentifier.java
index 1fee082..3e017af 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryColumnUniqueIdentifier.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DictionaryColumnUniqueIdentifier.java
@@ -19,9 +19,9 @@
 
 package org.apache.carbondata.core.cache.dictionary;
 
-import org.apache.carbondata.core.carbon.CarbonTableIdentifier;
-import org.apache.carbondata.core.carbon.ColumnIdentifier;
-import org.apache.carbondata.core.carbon.metadata.datatype.DataType;
+import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
+import org.apache.carbondata.core.metadata.ColumnIdentifier;
+import org.apache.carbondata.core.metadata.datatype.DataType;
 
 /**
  * dictionary column identifier which includes table identifier and column identifier

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
index 13d9233..2207179 100644
--- a/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
+++ b/core/src/main/java/org/apache/carbondata/core/cache/dictionary/DoubleArrayTrieDictionary.java
@@ -19,7 +19,10 @@
 
 package org.apache.carbondata.core.cache.dictionary;
 
-import java.io.*;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.io.PrintStream;
 import java.util.SortedSet;
 import java.util.TreeSet;
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/cache/update/BlockletLevelDeleteDeltaDataCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/cache/update/BlockletLevelDeleteDeltaDataCache.java b/core/src/main/java/org/apache/carbondata/core/cache/update/BlockletLevelDeleteDeltaDataCache.java
new file mode 100644
index 0000000..92350f3
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/cache/update/BlockletLevelDeleteDeltaDataCache.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.cache.update;
+
+import org.roaringbitmap.RoaringBitmap;
+
+/**
+ * This class maintains delete delta data cache of each blocklet along with the block timestamp
+ */
+public class BlockletLevelDeleteDeltaDataCache {
+  private RoaringBitmap deleteDeltaDataCache;
+  private String timeStamp;
+
+  public BlockletLevelDeleteDeltaDataCache(int[] deleteDeltaFileData, String timeStamp) {
+    deleteDeltaDataCache = RoaringBitmap.bitmapOf(deleteDeltaFileData);
+    this.timeStamp = timeStamp;
+  }
+
+  public boolean contains(int key) {
+    return deleteDeltaDataCache.contains(key);
+  }
+
+  public int getSize() {
+    return deleteDeltaDataCache.getCardinality();
+  }
+
+  public String getCacheTimeStamp() {
+    return timeStamp;
+  }
+}
+
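
For orientation, a minimal sketch of how this new cache is intended to be consumed (illustrative only; the array of deleted row ids and the timestamp would normally come from reading a delete delta file, which is outside the scope of this diff):

    import org.apache.carbondata.core.cache.update.BlockletLevelDeleteDeltaDataCache;

    public final class DeleteDeltaCacheExample {
      public static void main(String[] args) {
        // assume row ids 3, 7 and 42 were deleted in this blocklet
        int[] deletedRows = {3, 7, 42};
        BlockletLevelDeleteDeltaDataCache cache =
            new BlockletLevelDeleteDeltaDataCache(deletedRows, "1484578407000");

        System.out.println(cache.contains(7));          // true: row would be skipped during scan
        System.out.println(cache.getSize());            // 3: number of deleted rows cached
        System.out.println(cache.getCacheTimeStamp());  // "1484578407000"
      }
    }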

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/AbsoluteTableIdentifier.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/AbsoluteTableIdentifier.java b/core/src/main/java/org/apache/carbondata/core/carbon/AbsoluteTableIdentifier.java
deleted file mode 100644
index 1424ba5..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/AbsoluteTableIdentifier.java
+++ /dev/null
@@ -1,141 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon;
-
-import java.io.File;
-import java.io.Serializable;
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
-import org.apache.carbondata.core.util.CarbonUtil;
-
-/**
- * identifier which will have store path and carbon table identifier
- */
-public class AbsoluteTableIdentifier implements Serializable {
-
-  /**
-   * serializable version
-   */
-  private static final long serialVersionUID = 4695047103484427506L;
-
-  /**
-   * path of the store
-   */
-  private String storePath;
-
-  /**
-   * carbon table identifier which will have table name and table database
-   * name
-   */
-  private CarbonTableIdentifier carbonTableIdentifier;
-
-  public AbsoluteTableIdentifier(String storePath, CarbonTableIdentifier carbonTableIdentifier) {
-    //TODO this should be moved to common place where path handling will be handled
-    this.storePath = FileFactory.getUpdatedFilePath(storePath);
-    this.carbonTableIdentifier = carbonTableIdentifier;
-  }
-
-  /**
-   * @return the storePath
-   */
-  public String getStorePath() {
-    return storePath;
-  }
-
-  /**
-   * @return the carbonTableIdentifier
-   */
-  public CarbonTableIdentifier getCarbonTableIdentifier() {
-    return carbonTableIdentifier;
-  }
-
-  public static AbsoluteTableIdentifier from(String dbName, String tableName) {
-    CarbonTableIdentifier identifier = new CarbonTableIdentifier(dbName, tableName, "");
-    return new AbsoluteTableIdentifier(CarbonUtil.getCarbonStorePath(), identifier);
-  }
-
-  public static AbsoluteTableIdentifier fromTablePath(String tablePath) {
-    String formattedTablePath = tablePath.replace('\\', '/');
-    String[] names = formattedTablePath.split("/");
-    if (names.length < 3) {
-      throw new IllegalArgumentException("invalid table path: " + tablePath);
-    }
-
-    String tableName = names[names.length - 1];
-    String dbName = names[names.length - 2];
-    String storePath = formattedTablePath.substring(0, formattedTablePath.lastIndexOf(dbName +
-            CarbonCommonConstants.FILE_SEPARATOR + tableName));
-
-    CarbonTableIdentifier identifier =
-        new CarbonTableIdentifier(dbName, tableName, Long.toString(System.currentTimeMillis()));
-    return new AbsoluteTableIdentifier(storePath, identifier);
-  }
-
-  public String getTablePath() {
-    return getStorePath() + File.separator + getCarbonTableIdentifier().getDatabaseName() +
-        File.separator + getCarbonTableIdentifier().getTableName();
-  }
-
-  /**
-   * to get the hash code
-   */
-  @Override public int hashCode() {
-    final int prime = 31;
-    int result = 1;
-    result =
-        prime * result + ((carbonTableIdentifier == null) ? 0 : carbonTableIdentifier.hashCode());
-    result = prime * result + ((storePath == null) ? 0 : storePath.hashCode());
-    return result;
-  }
-
-  /**
-   * to check this class is equal to
-   * other object passed
-   *
-   * @param obj other object
-   */
-  @Override public boolean equals(Object obj) {
-    if (this == obj) {
-      return true;
-    }
-    if (obj == null) {
-      return false;
-    }
-    if (!(obj instanceof AbsoluteTableIdentifier)) {
-      return false;
-    }
-    AbsoluteTableIdentifier other = (AbsoluteTableIdentifier) obj;
-    if (carbonTableIdentifier == null) {
-      if (other.carbonTableIdentifier != null) {
-        return false;
-      }
-    } else if (!carbonTableIdentifier.equals(other.carbonTableIdentifier)) {
-      return false;
-    }
-    if (storePath == null) {
-      if (other.storePath != null) {
-        return false;
-      }
-    } else if (!storePath.equals(other.storePath)) {
-      return false;
-    }
-    return true;
-  }
-}
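
To make the path handling in the AbsoluteTableIdentifier being moved by this commit concrete, here is a small sketch of what fromTablePath yields for a hypothetical table path (the class is referenced at its pre-move location shown above):

    import org.apache.carbondata.core.carbon.AbsoluteTableIdentifier;

    public final class TablePathExample {
      public static void main(String[] args) {
        // a table path of the form <storePath>/<databaseName>/<tableName>
        AbsoluteTableIdentifier id =
            AbsoluteTableIdentifier.fromTablePath("/opt/carbonStore/default/sales");
        // the last two path segments become the database and table name;
        // everything before them is treated as the store path
        System.out.println(id.getCarbonTableIdentifier().getDatabaseName()); // default
        System.out.println(id.getCarbonTableIdentifier().getTableName());    // sales
        System.out.println(id.getStorePath());
        // the table id is generated from System.currentTimeMillis(), so it differs per call
      }
    }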

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/CarbonDataLoadSchema.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/CarbonDataLoadSchema.java b/core/src/main/java/org/apache/carbondata/core/carbon/CarbonDataLoadSchema.java
deleted file mode 100644
index 45b2bcd..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/CarbonDataLoadSchema.java
+++ /dev/null
@@ -1,184 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon;
-
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.carbondata.core.carbon.metadata.schema.table.CarbonTable;
-
-/**
- * Wrapper Data Load Schema object which will be used to
- * support relation while data loading
- */
-public class CarbonDataLoadSchema implements Serializable {
-
-  /**
-   * default serializer
-   */
-  private static final long serialVersionUID = 1L;
-
-  /**
-   * CarbonTable info
-   */
-  private CarbonTable carbonTable;
-
-  /**
-   * dimension table and relation info
-   */
-  private List<DimensionRelation> dimensionRelationList;
-
-  /**
-   * CarbonDataLoadSchema constructor which takes CarbonTable
-   *
-   * @param carbonTable
-   */
-  public CarbonDataLoadSchema(CarbonTable carbonTable) {
-    this.carbonTable = carbonTable;
-    this.dimensionRelationList = new ArrayList<DimensionRelation>();
-  }
-
-  /**
-   * get dimension relation list
-   *
-   * @return dimensionRelationList
-   */
-  public List<DimensionRelation> getDimensionRelationList() {
-    return dimensionRelationList;
-  }
-
-  /**
-   * get carbontable
-   *
-   * @return carbonTable
-   */
-  public CarbonTable getCarbonTable() {
-    return carbonTable;
-  }
-
-  /**
-   * Dimension Relation object which will be filled from
-   * Load DML Command to support normalized table data load
-   */
-  public static class DimensionRelation implements Serializable {
-    /**
-     * default serializer
-     */
-    private static final long serialVersionUID = 1L;
-
-    /**
-     * dimension tableName
-     */
-    private String tableName;
-
-    /**
-     * relation with fact and dimension table
-     */
-    private Relation relation;
-
-    /**
-     * Columns to selected from dimension table.
-     * Hierarchy in-memory table should be prepared
-     * based on selected columns
-     */
-    private List<String> columns;
-
-    /**
-     * constructor
-     *
-     * @param tableName       - dimension table name
-     * @param relation        - fact foreign key with dimension primary key mapping
-     * @param columns         - list of columns to be used from this dimension table
-     */
-    public DimensionRelation(String tableName, Relation relation,
-        List<String> columns) {
-      this.tableName = tableName;
-      this.relation = relation;
-      this.columns = columns;
-    }
-
-    /**
-     * @return tableName
-     */
-    public String getTableName() {
-      return tableName;
-    }
-
-    /**
-     * @return relation
-     */
-    public Relation getRelation() {
-      return relation;
-    }
-
-    /**
-     * @return columns
-     */
-    public List<String> getColumns() {
-      return columns;
-    }
-  }
-
-  /**
-   * Relation class to specify fact foreignkey column with
-   * dimension primary key column
-   */
-  public static class Relation implements Serializable {
-    /**
-     * default serializer
-     */
-    private static final long serialVersionUID = 1L;
-
-    /**
-     * Fact foreign key column
-     */
-    private String factForeignKeyColumn;
-
-    /**
-     * dimension primary key column
-     */
-    private String dimensionPrimaryKeyColumn;
-
-    /**
-     * constructor
-     *
-     * @param factForeignKeyColumn      - Fact Table Foreign key
-     * @param dimensionPrimaryKeyColumn - Dimension Table primary key
-     */
-    public Relation(String factForeignKeyColumn, String dimensionPrimaryKeyColumn) {
-      this.factForeignKeyColumn = factForeignKeyColumn;
-      this.dimensionPrimaryKeyColumn = dimensionPrimaryKeyColumn;
-    }
-
-    /**
-     * @return factForeignKeyColumn
-     */
-    public String getFactForeignKeyColumn() {
-      return factForeignKeyColumn;
-    }
-
-    /**
-     * @return dimensionPrimaryKeyColumn
-     */
-    public String getDimensionPrimaryKeyColumn() {
-      return dimensionPrimaryKeyColumn;
-    }
-  }
-}
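
A short sketch of how the CarbonDataLoadSchema wrapper above is assembled for a normalized load, joining the fact table to one dimension table (the CarbonTable instance is assumed to come from the table metadata and is not constructed here; the class is referenced at its pre-move location):

    import java.util.Arrays;

    import org.apache.carbondata.core.carbon.CarbonDataLoadSchema;
    import org.apache.carbondata.core.carbon.metadata.schema.table.CarbonTable;

    public final class LoadSchemaExample {
      static CarbonDataLoadSchema buildSchema(CarbonTable carbonTable) {
        CarbonDataLoadSchema schema = new CarbonDataLoadSchema(carbonTable);
        schema.getDimensionRelationList().add(
            new CarbonDataLoadSchema.DimensionRelation(
                "customer_dim",                                         // dimension table name
                new CarbonDataLoadSchema.Relation("customer_id", "id"), // fact FK -> dimension PK
                Arrays.asList("id", "name", "country")));               // columns used from the dimension
        return schema;
      }
    }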

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/CarbonTableIdentifier.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/CarbonTableIdentifier.java b/core/src/main/java/org/apache/carbondata/core/carbon/CarbonTableIdentifier.java
deleted file mode 100644
index 439f7b8..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/CarbonTableIdentifier.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.carbon;
-
-import java.io.File;
-import java.io.Serializable;
-
-/**
- * Identifier class which will hold the table qualified name
- */
-public class CarbonTableIdentifier implements Serializable {
-
-  /**
-   * database name
-   */
-  private String databaseName;
-
-  /**
-   * table name
-   */
-  private String tableName;
-
-  /**
-   * table id
-   */
-  private String tableId;
-
-  /**
-   * constructor
-   */
-  public CarbonTableIdentifier(String databaseName, String tableName, String tableId) {
-    this.databaseName = databaseName;
-    this.tableName = tableName;
-    this.tableId = tableId;
-  }
-
-  /**
-   * return database name
-   */
-  public String getDatabaseName() {
-    return databaseName;
-  }
-
-  /**
-   * return table name
-   */
-  public String getTableName() {
-    return tableName;
-  }
-
-  /**
-   * @return tableId
-   */
-  public String getTableId() {
-    return tableId;
-  }
-
-  /**
-   * @return table unique name
-   */
-  public String getTableUniqueName() {
-    return databaseName + '_' + tableName;
-  }
-
-  /**
-   * Creates the key for bad record logger.
-   */
-  public String getBadRecordLoggerKey() {
-    return databaseName + File.separator + tableName + '_' + tableName;
-  }
-
-  @Override public int hashCode() {
-    final int prime = 31;
-    int result = 1;
-    result = prime * result + ((databaseName == null) ? 0 : databaseName.hashCode());
-    result = prime * result + ((tableId == null) ? 0 : tableId.hashCode());
-    result = prime * result + ((tableName == null) ? 0 : tableName.hashCode());
-    return result;
-  }
-
-  @Override public boolean equals(Object obj) {
-    if (this == obj) {
-      return true;
-    }
-    if (obj == null) {
-      return false;
-    }
-    if (getClass() != obj.getClass()) {
-      return false;
-    }
-    CarbonTableIdentifier other = (CarbonTableIdentifier) obj;
-    if (databaseName == null) {
-      if (other.databaseName != null) {
-        return false;
-      }
-    } else if (!databaseName.equals(other.databaseName)) {
-      return false;
-    }
-    if (tableId == null) {
-      if (other.tableId != null) {
-        return false;
-      }
-    } else if (!tableId.equals(other.tableId)) {
-      return false;
-    }
-    if (tableName == null) {
-      if (other.tableName != null) {
-        return false;
-      }
-    } else if (!tableName.equals(other.tableName)) {
-      return false;
-    }
-    return true;
-  }
-
-  /**
-   * @return table unique name
-   */
-  @Override public String toString() {
-    return databaseName + '_' + tableName;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/ColumnIdentifier.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/ColumnIdentifier.java b/core/src/main/java/org/apache/carbondata/core/carbon/ColumnIdentifier.java
deleted file mode 100644
index 62ae748..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/ColumnIdentifier.java
+++ /dev/null
@@ -1,114 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon;
-
-import java.io.Serializable;
-import java.util.Map;
-
-import org.apache.carbondata.core.carbon.metadata.datatype.DataType;
-
-/**
- * Column unique identifier
- */
-public class ColumnIdentifier implements Serializable {
-
-  /**
-   *
-   */
-  private static final long serialVersionUID = 1L;
-
-  /**
-   * column id
-   */
-  private String columnId;
-
-  /**
-   * column properties
-   */
-  private Map<String, String> columnProperties;
-
-  private DataType dataType;
-
-  /**
-   * @param columnId
-   * @param columnProperties
-   */
-  public ColumnIdentifier(String columnId, Map<String, String> columnProperties,
-      DataType dataType) {
-    this.columnId = columnId;
-    this.columnProperties = columnProperties;
-    this.dataType = dataType;
-  }
-
-  /**
-   * @return columnId
-   */
-  public String getColumnId() {
-    return columnId;
-  }
-
-  /**
-   * @param columnProperty
-   * @return
-   */
-  public String getColumnProperty(String columnProperty) {
-    String property = null;
-    if (null != columnProperties) {
-      property = columnProperties.get(columnProperty);
-    }
-    return property;
-  }
-
-  public DataType getDataType() {
-    return this.dataType;
-  }
-
-  @Override public int hashCode() {
-    final int prime = 31;
-    int result = 1;
-    result = prime * result + ((columnId == null) ? 0 : columnId.hashCode());
-    return result;
-  }
-
-  @Override public boolean equals(Object obj) {
-    if (this == obj) {
-      return true;
-    }
-    if (obj == null) {
-      return false;
-    }
-    if (getClass() != obj.getClass()) {
-      return false;
-    }
-    ColumnIdentifier other = (ColumnIdentifier) obj;
-    if (columnId == null) {
-      if (other.columnId != null) {
-        return false;
-      }
-    } else if (!columnId.equals(other.columnId)) {
-      return false;
-    }
-    return true;
-  }
-
-  @Override public String toString() {
-    return "ColumnIdentifier [columnId=" + columnId + "]";
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/ColumnarFormatVersion.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/ColumnarFormatVersion.java b/core/src/main/java/org/apache/carbondata/core/carbon/ColumnarFormatVersion.java
deleted file mode 100644
index 7b67f94..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/ColumnarFormatVersion.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.carbon;
-
-public enum ColumnarFormatVersion {
-  V1((short)1),
-  V2((short)2);
-
-  private short version;
-  ColumnarFormatVersion(short version) {
-    this.version = version;
-  }
-
-  @Override
-  public String toString() {
-    return "ColumnarFormatV" + version;
-  }
-
-  public short number() {
-    return version;
-  }
-
-  public static ColumnarFormatVersion valueOf(short version) {
-    switch (version) {
-      case 0:
-        // before multiple reader support, for existing carbon file, it is version 1
-        return V1;
-      case 1:
-        // after multiple reader support, user can write new file with version 1
-        return V1;
-      default:
-        return V2;
-    }
-  }
-}
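
The valueOf mapping above reduces to: a stored version of 0 or 1 resolves to V1 (files written before and after multiple-reader support), anything else resolves to V2. A tiny illustration (class referenced at its pre-move location):

    import org.apache.carbondata.core.carbon.ColumnarFormatVersion;

    public final class FormatVersionExample {
      public static void main(String[] args) {
        System.out.println(ColumnarFormatVersion.valueOf((short) 0)); // ColumnarFormatV1
        System.out.println(ColumnarFormatVersion.valueOf((short) 1)); // ColumnarFormatV1
        System.out.println(ColumnarFormatVersion.valueOf((short) 3)); // ColumnarFormatV2
        System.out.println(ColumnarFormatVersion.V2.number());        // 2
      }
    }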

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/AbstractBlockIndexStoreCache.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/AbstractBlockIndexStoreCache.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/AbstractBlockIndexStoreCache.java
deleted file mode 100644
index 4e80926..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/AbstractBlockIndexStoreCache.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.carbon.datastore;
-
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.carbondata.core.cache.Cache;
-import org.apache.carbondata.core.cache.CarbonLRUCache;
-import org.apache.carbondata.core.carbon.datastore.block.AbstractIndex;
-import org.apache.carbondata.core.carbon.datastore.block.BlockInfo;
-import org.apache.carbondata.core.carbon.datastore.block.TableBlockInfo;
-import org.apache.carbondata.core.carbon.datastore.block.TableBlockUniqueIdentifier;
-import org.apache.carbondata.core.carbon.datastore.exception.IndexBuilderException;
-import org.apache.carbondata.core.carbon.metadata.blocklet.DataFileFooter;
-import org.apache.carbondata.core.util.CarbonUtil;
-
-/**
- * This class validates and loads the B-Tree into the executor LRU cache
- * @param <K> cache key
- * @param <V> Block Meta data details
- */
-public abstract class AbstractBlockIndexStoreCache<K, V>
-    implements Cache<TableBlockUniqueIdentifier, AbstractIndex> {
-  /**
-   * carbon store path
-   */
-  protected String carbonStorePath;
-  /**
-   * CarbonLRU cache
-   */
-  protected CarbonLRUCache lruCache;
-
-  /**
-   * table segment id vs blockInfo list
-   */
-  protected  Map<String, List<BlockInfo>> segmentIdToBlockListMap;
-
-
-  /**
-   * Map of block info to lock object. While loading the B-Tree this map will be filled,
-   * and the entry removed after the tree for that particular block info is loaded. This is
-   * useful while loading the tree concurrently: only a block-level lock is applied, so
-   * another block can be loaded concurrently.
-   */
-  protected Map<BlockInfo, Object> blockInfoLock;
-
-  /**
-   * The object will hold the segment ID lock so that at a time only 1 block that belongs to same
-   * segment & table can create the list for holding the block info
-   */
-  protected Map<String, Object> segmentIDLock;
-
-  public AbstractBlockIndexStoreCache(String carbonStorePath, CarbonLRUCache lruCache) {
-    this.carbonStorePath = carbonStorePath;
-    this.lruCache = lruCache;
-    blockInfoLock = new ConcurrentHashMap<BlockInfo, Object>();
-    segmentIDLock= new ConcurrentHashMap<String, Object>();
-    segmentIdToBlockListMap = new ConcurrentHashMap<>();
-  }
-
-  /**
-   * This method will get the value for the given key. If value does not exist
-   * for the given key, it will check and load the value.
-   *
-   * @param tableBlock
-   * @param tableBlockUniqueIdentifier
-   * @param lruCacheKey
-   */
-  protected void checkAndLoadTableBlocks(AbstractIndex tableBlock,
-      TableBlockUniqueIdentifier tableBlockUniqueIdentifier, String lruCacheKey)
-      throws IOException {
-      // calculate the required meta size
-    TableBlockInfo blockInfo = tableBlockUniqueIdentifier.getTableBlockInfo();
-    long requiredMetaSize = CarbonUtil.calculateMetaSize(blockInfo);
-    if (requiredMetaSize > 0) {
-      tableBlock.setMemorySize(requiredMetaSize);
-      tableBlock.incrementAccessCount();
-      boolean isTableBlockAddedToLruCache = lruCache.put(lruCacheKey, tableBlock, requiredMetaSize);
-      // if column is successfully added to lru cache then only load the
-      // table blocks data
-      if (isTableBlockAddedToLruCache) {
-        // load table blocks data
-        // getting the data file meta data of the block
-        DataFileFooter footer = CarbonUtil.readMetadatFile(blockInfo);
-        footer.setBlockInfo(new BlockInfo(blockInfo));
-        // building the block
-        tableBlock.buildIndex(Collections.singletonList(footer));
-      } else {
-        throw new IndexBuilderException(
-            "Cannot load table blocks into memory. Not enough memory available");
-      }
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BTreeBuilderInfo.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BTreeBuilderInfo.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BTreeBuilderInfo.java
deleted file mode 100644
index 9c090da..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BTreeBuilderInfo.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore;
-
-import java.util.List;
-
-import org.apache.carbondata.core.carbon.metadata.blocklet.DataFileFooter;
-
-/**
- * Below class holds the metadata required to build the blocks
- */
-public class BTreeBuilderInfo {
-
-  /**
-   * holds all the information about data
-   * file meta data
-   */
-  private List<DataFileFooter> footerList;
-
-  /**
-   * size of the each column value size
-   * this will be useful for reading
-   */
-  private int[] dimensionColumnValueSize;
-
-  public BTreeBuilderInfo(List<DataFileFooter> footerList,
-      int[] dimensionColumnValueSize) {
-    this.dimensionColumnValueSize = dimensionColumnValueSize;
-    this.footerList = footerList;
-  }
-
-  /**
-   * @return the eachDimensionBlockSize
-   */
-  public int[] getDimensionColumnValueSize() {
-    return dimensionColumnValueSize;
-  }
-
-  /**
-   * @return the footerList
-   */
-  public List<DataFileFooter> getFooterList() {
-    return footerList;
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BlockIndexStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BlockIndexStore.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BlockIndexStore.java
deleted file mode 100644
index cdaedd8..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BlockIndexStore.java
+++ /dev/null
@@ -1,381 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.carbon.datastore;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.Callable;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.Future;
-import java.util.concurrent.TimeUnit;
-
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.cache.CarbonLRUCache;
-import org.apache.carbondata.core.carbon.AbsoluteTableIdentifier;
-import org.apache.carbondata.core.carbon.CarbonTableIdentifier;
-import org.apache.carbondata.core.carbon.datastore.block.AbstractIndex;
-import org.apache.carbondata.core.carbon.datastore.block.BlockIndex;
-import org.apache.carbondata.core.carbon.datastore.block.BlockInfo;
-import org.apache.carbondata.core.carbon.datastore.block.SegmentTaskIndexWrapper;
-import org.apache.carbondata.core.carbon.datastore.block.TableBlockInfo;
-import org.apache.carbondata.core.carbon.datastore.block.TableBlockUniqueIdentifier;
-import org.apache.carbondata.core.carbon.datastore.exception.IndexBuilderException;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-import org.apache.carbondata.core.update.CarbonUpdateUtil;
-import org.apache.carbondata.core.update.UpdateVO;
-import org.apache.carbondata.core.util.CarbonProperties;
-import org.apache.carbondata.scan.model.QueryModel;
-
-
-/**
- * This class is used to load the B-Tree in Executor LRU Cache
- */
-public class BlockIndexStore<K, V> extends AbstractBlockIndexStoreCache<K, V> {
-
-  /**
-   * LOGGER instance
-   */
-  private static final LogService LOGGER =
-      LogServiceFactory.getLogService(BlockIndexStore.class.getName());
-  public BlockIndexStore(String carbonStorePath, CarbonLRUCache lruCache) {
-    super(carbonStorePath, lruCache);
-  }
-
-  /**
-   * The method loads the block meta in B-tree lru cache and returns the block meta.
-   *
-   * @param tableBlockUniqueIdentifier Uniquely identifies the block
-   * @return returns the blocks B-Tree meta
-   */
-  @Override public AbstractIndex get(TableBlockUniqueIdentifier tableBlockUniqueIdentifier)
-      throws IOException {
-    TableBlockInfo tableBlockInfo = tableBlockUniqueIdentifier.getTableBlockInfo();
-    BlockInfo blockInfo = new BlockInfo(tableBlockInfo);
-    String lruCacheKey =
-        getLruCacheKey(tableBlockUniqueIdentifier.getAbsoluteTableIdentifier(), blockInfo);
-    AbstractIndex tableBlock = (AbstractIndex) lruCache.get(lruCacheKey);
-
-    // if block is not loaded
-    if (null == tableBlock) {
-      // check any lock object is present in
-      // block info lock map
-      Object blockInfoLockObject = blockInfoLock.get(blockInfo);
-      // if the lock object is not present then acquire
-      // the lock on the block info lock map and add a lock object to the map for that
-      // particular block info; a double-checking mechanism is used to add the lock
-      // object so that in case of a concurrent query only one lock object
-      // is added for the same block info
-      if (null == blockInfoLockObject) {
-        synchronized (blockInfoLock) {
-          // again checking the block info lock, to check whether lock object is present
-          // or not if now also not present then add a lock object
-          blockInfoLockObject = blockInfoLock.get(blockInfo);
-          if (null == blockInfoLockObject) {
-            blockInfoLockObject = new Object();
-            blockInfoLock.put(blockInfo, blockInfoLockObject);
-          }
-        }
-      }
-      //acquire the lock for particular block info
-      synchronized (blockInfoLockObject) {
-        // check again whether the block is present or not to avoid the
-        // same block being loaded
-        // more than once in case of a concurrent query
-        tableBlock = (AbstractIndex) lruCache.get(
-            getLruCacheKey(tableBlockUniqueIdentifier.getAbsoluteTableIdentifier(), blockInfo));
-        // if still block is not present then load the block
-        if (null == tableBlock) {
-          tableBlock = loadBlock(tableBlockUniqueIdentifier);
-          fillSegmentIdToBlockListMap(tableBlockUniqueIdentifier.getAbsoluteTableIdentifier(),
-              blockInfo);
-        }
-      }
-    } else {
-      tableBlock.incrementAccessCount();
-    }
-    return tableBlock;
-  }
-
-  /**
-   * @param absoluteTableIdentifier
-   * @param blockInfo
-   */
-  private void fillSegmentIdToBlockListMap(AbsoluteTableIdentifier absoluteTableIdentifier,
-      BlockInfo blockInfo) {
-    TableSegmentUniqueIdentifier segmentIdentifier =
-        new TableSegmentUniqueIdentifier(absoluteTableIdentifier,
-            blockInfo.getTableBlockInfo().getSegmentId());
-    String uniqueTableSegmentIdentifier = segmentIdentifier.getUniqueTableSegmentIdentifier();
-    List<BlockInfo> blockInfos =
-        segmentIdToBlockListMap.get(uniqueTableSegmentIdentifier);
-    if (null == blockInfos) {
-      Object segmentLockObject = segmentIDLock.get(uniqueTableSegmentIdentifier);
-      if (null == segmentLockObject) {
-        synchronized (segmentIDLock) {
-          segmentLockObject = segmentIDLock.get(uniqueTableSegmentIdentifier);
-          if (null == segmentLockObject) {
-            segmentLockObject = new Object();
-            segmentIDLock.put(uniqueTableSegmentIdentifier, segmentLockObject);
-          }
-        }
-      }
-      synchronized (segmentLockObject) {
-        blockInfos =
-            segmentIdToBlockListMap.get(segmentIdentifier.getUniqueTableSegmentIdentifier());
-        if (null == blockInfos) {
-          blockInfos = new CopyOnWriteArrayList<>();
-          segmentIdToBlockListMap.put(uniqueTableSegmentIdentifier, blockInfos);
-        }
-        blockInfos.add(blockInfo);
-      }
-    } else {
-      blockInfos.add(blockInfo);
-    }
-  }
-
-  /**
-   * The method takes a list of table blocks as input, loads them into the B-Tree LRU cache
-   * and returns the list of data block metadata
-   *
-   * @param tableBlocksInfos List of unique table blocks
-   * @return List<AbstractIndex>
-   * @throws IndexBuilderException
-   */
-  @Override public List<AbstractIndex> getAll(List<TableBlockUniqueIdentifier> tableBlocksInfos)
-      throws IndexBuilderException {
-    AbstractIndex[] loadedBlock = new AbstractIndex[tableBlocksInfos.size()];
-    int numberOfCores = 1;
-    try {
-      numberOfCores = Integer.parseInt(CarbonProperties.getInstance()
-          .getProperty(CarbonCommonConstants.NUM_CORES,
-              CarbonCommonConstants.NUM_CORES_DEFAULT_VAL));
-    } catch (NumberFormatException e) {
-      numberOfCores = Integer.parseInt(CarbonCommonConstants.NUM_CORES_DEFAULT_VAL);
-    }
-    ExecutorService executor = Executors.newFixedThreadPool(numberOfCores);
-    List<Future<AbstractIndex>> blocksList = new ArrayList<Future<AbstractIndex>>();
-    for (TableBlockUniqueIdentifier tableBlockUniqueIdentifier : tableBlocksInfos) {
-      blocksList.add(executor.submit(new BlockLoaderThread(tableBlockUniqueIdentifier)));
-    }
-    // shutdown the executor gracefully and wait until all the task is finished
-    executor.shutdown();
-    try {
-      executor.awaitTermination(1, TimeUnit.HOURS);
-    } catch (InterruptedException e) {
-      throw new IndexBuilderException(e);
-    }
-    // fill the block which were not loaded before to loaded blocks array
-    fillLoadedBlocks(loadedBlock, blocksList);
-    return Arrays.asList(loadedBlock);
-  }
-
-  private String getLruCacheKey(AbsoluteTableIdentifier absoluteTableIdentifier,
-      BlockInfo blockInfo) {
-    CarbonTableIdentifier carbonTableIdentifier =
-        absoluteTableIdentifier.getCarbonTableIdentifier();
-    return carbonTableIdentifier.getDatabaseName() + CarbonCommonConstants.FILE_SEPARATOR
-        + carbonTableIdentifier.getTableName() + CarbonCommonConstants.UNDERSCORE
-        + carbonTableIdentifier.getTableId() + CarbonCommonConstants.FILE_SEPARATOR + blockInfo
-        .getBlockUniqueName();
-  }
-
-  /**
-   * method returns the B-Tree meta
-   *
-   * @param tableBlockUniqueIdentifier Unique table block info
-   * @return
-   */
-  @Override public AbstractIndex getIfPresent(
-      TableBlockUniqueIdentifier tableBlockUniqueIdentifier) {
-    BlockInfo blockInfo = new BlockInfo(tableBlockUniqueIdentifier.getTableBlockInfo());
-    BlockIndex cacheable = (BlockIndex) lruCache
-        .get(getLruCacheKey(tableBlockUniqueIdentifier.getAbsoluteTableIdentifier(), blockInfo));
-    if (null != cacheable) {
-      cacheable.incrementAccessCount();
-    }
-    return cacheable;
-  }
-
-  /**
-   * the method removes the entry from cache.
-   *
-   * @param tableBlockUniqueIdentifier
-   */
-  @Override public void invalidate(TableBlockUniqueIdentifier tableBlockUniqueIdentifier) {
-    BlockInfo blockInfo = new BlockInfo(tableBlockUniqueIdentifier.getTableBlockInfo());
-    lruCache
-        .remove(getLruCacheKey(tableBlockUniqueIdentifier.getAbsoluteTableIdentifier(), blockInfo));
-  }
-
-  @Override public void clearAccessCount(List<TableBlockUniqueIdentifier> keys) {
-    for (TableBlockUniqueIdentifier tableBlockUniqueIdentifier : keys) {
-      SegmentTaskIndexWrapper cacheable = (SegmentTaskIndexWrapper) lruCache
-          .get(tableBlockUniqueIdentifier.getUniqueTableBlockName());
-      cacheable.clear();
-    }
-  }
-
-  /**
-   * Below method will be used to fill the loaded blocks to the array
-   * which will be used for query execution
-   *
-   * @param loadedBlockArray array of blocks which will be filled
-   * @param blocksList       blocks loaded in thread
-   * @throws IndexBuilderException in case of any failure
-   */
-  private void fillLoadedBlocks(AbstractIndex[] loadedBlockArray,
-      List<Future<AbstractIndex>> blocksList) throws IndexBuilderException {
-    int blockCounter = 0;
-    boolean exceptionOccurred = false;
-    Throwable exceptionRef = null;
-    for (int i = 0; i < loadedBlockArray.length; i++) {
-      try {
-        loadedBlockArray[i] = blocksList.get(blockCounter++).get();
-      } catch (Throwable e) {
-        exceptionOccurred = true;
-        exceptionRef = e;
-      }
-    }
-    if (exceptionOccurred) {
-      LOGGER.error("Block B-tree loading failed. Clearing the access count of the loaded blocks.");
-      // in case of any failure clear the access count for the valid loaded blocks
-      clearAccessCountForLoadedBlocks(loadedBlockArray);
-      throw new IndexBuilderException("Block B-tree loading failed", exceptionRef);
-    }
-  }
-
-  /**
-   * This method will clear the access count for the loaded blocks
-   *
-   * @param loadedBlockArray
-   */
-  private void clearAccessCountForLoadedBlocks(AbstractIndex[] loadedBlockArray) {
-    for (int i = 0; i < loadedBlockArray.length; i++) {
-      if (null != loadedBlockArray[i]) {
-        loadedBlockArray[i].clear();
-      }
-    }
-  }
-
-  /**
-   * Thread class which will be used to load the blocks
-   */
-  private class BlockLoaderThread implements Callable<AbstractIndex> {
-    // table  block unique identifier
-    private TableBlockUniqueIdentifier tableBlockUniqueIdentifier;
-
-    private BlockLoaderThread(TableBlockUniqueIdentifier tableBlockUniqueIdentifier) {
-      this.tableBlockUniqueIdentifier = tableBlockUniqueIdentifier;
-    }
-
-    @Override public AbstractIndex call() throws Exception {
-      // load and return the loaded blocks
-      return get(tableBlockUniqueIdentifier);
-    }
-  }
-
-  private AbstractIndex loadBlock(TableBlockUniqueIdentifier tableBlockUniqueIdentifier)
-      throws IOException {
-    AbstractIndex tableBlock = new BlockIndex();
-    BlockInfo blockInfo = new BlockInfo(tableBlockUniqueIdentifier.getTableBlockInfo());
-    String lruCacheKey =
-        getLruCacheKey(tableBlockUniqueIdentifier.getAbsoluteTableIdentifier(), blockInfo);
-    checkAndLoadTableBlocks(tableBlock, tableBlockUniqueIdentifier, lruCacheKey);
-    // finally remove the lock object from block info lock as once block is loaded
-    // it will not come inside this if condition
-    blockInfoLock.remove(blockInfo);
-    return tableBlock;
-  }
-
-  /**
-   * This will be used to remove particular blocks, useful in case of
-   * deletion of some of the blocks due to retention or some other
-   * scenario
-   *
-   * @param segmentIds              list of segment ids whose blocks are to be removed
-   * @param absoluteTableIdentifier absolute table identifier
-   */
-  public void removeTableBlocks(List<String> segmentIds,
-      AbsoluteTableIdentifier absoluteTableIdentifier) {
-    if (null == segmentIds) {
-      return;
-    }
-    for (String segmentId : segmentIds) {
-      TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier =
-          new TableSegmentUniqueIdentifier(absoluteTableIdentifier, segmentId);
-      List<BlockInfo> blockInfos = segmentIdToBlockListMap
-          .remove(tableSegmentUniqueIdentifier.getUniqueTableSegmentIdentifier());
-      if (null != blockInfos) {
-        for (BlockInfo blockInfo : blockInfos) {
-          String lruCacheKey = getLruCacheKey(absoluteTableIdentifier, blockInfo);
-          lruCache.remove(lruCacheKey);
-        }
-      }
-    }
-  }
-
-  /**
-   * remove table blocks at executor level if horizontal compaction is done
-   * @param queryModel
-   */
-  public void removeTableBlocksIfHorizontalCompactionDone(QueryModel queryModel) {
-    // get the invalid segments blocks details
-    Map<String, UpdateVO> invalidBlocksVO = queryModel.getInvalidBlockVOForSegmentId();
-    if (!invalidBlocksVO.isEmpty()) {
-      UpdateVO updateMetadata;
-      Iterator<Map.Entry<String, UpdateVO>> itr = invalidBlocksVO.entrySet().iterator();
-      String blockTimestamp = null;
-      while (itr.hasNext()) {
-        Map.Entry<String, UpdateVO> entry = itr.next();
-        TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier =
-            new TableSegmentUniqueIdentifier(queryModel.getAbsoluteTableIdentifier(),
-                entry.getKey());
-        List<BlockInfo> blockInfos = segmentIdToBlockListMap
-            .get(tableSegmentUniqueIdentifier.getUniqueTableSegmentIdentifier());
-        if (null != blockInfos) {
-          for (BlockInfo blockInfo : blockInfos) {
-            // reading the updated block names from status manager instance
-            blockTimestamp = blockInfo.getBlockUniqueName()
-                .substring(blockInfo.getBlockUniqueName().lastIndexOf('-') + 1,
-                    blockInfo.getBlockUniqueName().length());
-            updateMetadata = entry.getValue();
-            if (CarbonUpdateUtil.isMaxQueryTimeoutExceeded(Long.parseLong(blockTimestamp))) {
-              Long blockTimeStamp = Long.parseLong(blockTimestamp);
-              if (blockTimeStamp > updateMetadata.getFactTimestamp() && (
-                  updateMetadata.getUpdateDeltaStartTimestamp() != null
-                      && blockTimeStamp < updateMetadata.getUpdateDeltaStartTimestamp())) {
-                String lruCacheKey =
-                    getLruCacheKey(queryModel.getAbsoluteTableIdentifier(), blockInfo);
-                lruCache.remove(lruCacheKey);
-              }
-            }
-          }
-        }
-      }
-    }
-  }
-}
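
The heart of BlockIndexStore.get() above is a per-block double-checked locking scheme: a shared map hands out one lock object per BlockInfo, so concurrent queries for the same block load it only once while different blocks load in parallel. A generic sketch of that pattern, with a plain ConcurrentHashMap standing in for the LRU cache (not part of the commit; computeIfAbsent replaces the manual double-checked insertion used in the original):

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;

    public final class PerKeyLoader<K, V> {
      interface Loader<K, V> { V load(K key) throws Exception; }

      private final Map<K, V> cache = new ConcurrentHashMap<>();
      private final Map<K, Object> locks = new ConcurrentHashMap<>();

      public V get(K key, Loader<K, V> loader) throws Exception {
        V value = cache.get(key);
        if (value == null) {
          // one lock object per key
          Object lock = locks.computeIfAbsent(key, k -> new Object());
          synchronized (lock) {
            // re-check inside the lock so the value is loaded only once
            value = cache.get(key);
            if (value == null) {
              value = loader.load(key);
              cache.put(key, value);
              locks.remove(key); // mirrors blockInfoLock.remove(blockInfo)
            }
          }
        }
        return value;
      }
    }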

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BtreeBuilder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BtreeBuilder.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BtreeBuilder.java
deleted file mode 100644
index fb59607..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/BtreeBuilder.java
+++ /dev/null
@@ -1,38 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore;
-
-/**
- * Below interface will be used to build the index
- * in some data structure
- */
-public interface BtreeBuilder {
-
-  /**
-   * Below method will be used to store the leaf collection in some data structure
-   */
-  void build(BTreeBuilderInfo blocksBuilderInfos);
-
-  /**
-   * below method to get the first data block
-   *
-   * @return data block
-   */
-  DataRefNode get();
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNode.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNode.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNode.java
deleted file mode 100644
index 0323d55..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNode.java
+++ /dev/null
@@ -1,128 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore;
-
-import java.io.IOException;
-
-import org.apache.carbondata.common.iudprocessor.cache.BlockletLevelDeleteDeltaDataCache;
-import org.apache.carbondata.core.carbon.datastore.chunk.DimensionColumnDataChunk;
-import org.apache.carbondata.core.carbon.datastore.chunk.MeasureColumnDataChunk;
-import org.apache.carbondata.core.datastorage.store.FileHolder;
-
-/**
- * Interface data block reference
- */
-public interface DataRefNode {
-
-  /**
-   * Method to get the next block; this can be used while scanning when
-   * an iterator of this class is used to iterate over blocks
-   *
-   * @return next block
-   */
-  DataRefNode getNextDataRefNode();
-
-  /**
-   * To get the number of key tuples present in the block
-   *
-   * @return number of keys in the block
-   */
-  int nodeSize();
-
-  /**
-   * Method can be used to get the block index. This can be used when multiple
-   * threads are used to scan a group of blocks, so that some of the blocks can be
-   * assigned to one thread and some to another
-   *
-   * @return block number
-   */
-  long nodeNumber();
-
-  /**
-   * This method will be used to get the max value of all the columns; this can
-   * be used in case of a filter query
-   *
-   */
-  byte[][] getColumnsMaxValue();
-
-  /**
-   * This method will be used to get the min value of all the columns; this can
-   * be used in case of a filter query
-   *
-   */
-  byte[][] getColumnsMinValue();
-
-  /**
-   * Below method will be used to get the dimension chunks
-   *
-   * @param fileReader   file reader to read the chunks from file
-   * @param blockIndexes range indexes of the blocks that need to be read,
-   *                     for example {{0,10},{11,12},{13,13}}:
-   *                     here column blocks 0 to 10 and 11 to 12 will each be read in one
-   *                     IO operation, and the 13th column block will be read separately.
-   *                     This helps to reduce IO by reading a bigger chunk of
-   *                     data in one IO
-   * @return dimension data chunks
-   */
-  DimensionColumnDataChunk[] getDimensionChunks(FileHolder fileReader, int[][] blockIndexes)
-      throws IOException;
-
-  /**
-   * Below method will be used to get the dimension chunk
-   *
-   * @param fileReader file reader to read the chunk from file
-   * @return dimension data chunk
-   */
-  DimensionColumnDataChunk getDimensionChunk(FileHolder fileReader, int blockIndexes)
-      throws IOException;
-
-  /**
-   * Below method will be used to get the measure chunk
-   *
-   * @param fileReader   file reader to read the chunk from file
-   * @param blockIndexes range indexes of the blocks that need to be read,
-   *                     for example {{0,10},{11,12},{13,13}}:
-   *                     here column blocks 0 to 10 and 11 to 12 will each be read in one
-   *                     IO operation, and the 13th column block will be read separately.
-   *                     This helps to reduce IO by reading a bigger chunk of
-   *                     data in one IO
-   * @return measure column data chunk
-   */
-  MeasureColumnDataChunk[] getMeasureChunks(FileHolder fileReader, int[][] blockIndexes)
-      throws IOException;
-
-  /**
-   * Below method will be used to read the measure chunk
-   *
-   * @param fileReader file reader to read the chunk from file
-   * @param blockIndex block index to be read from file
-   * @return measure data chunk
-   */
-  MeasureColumnDataChunk getMeasureChunk(FileHolder fileReader, int blockIndex) throws IOException;
-
-  /**
-   * @param deleteDeltaDataCache blocklet level delete delta cache to set on this node
-   */
-  void setDeleteDeltaDataCache(BlockletLevelDeleteDeltaDataCache deleteDeltaDataCache);
-
-  /**
-   * @return the blocklet level delete delta cache set on this node
-   */
-  BlockletLevelDeleteDeltaDataCache getDeleteDeltaDataCache();
-}
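
The javadoc on getDimensionChunks()/getMeasureChunks() above describes blockIndexes as contiguous ranges such as {{0,10},{11,12},{13,13}}, so adjacent column blocks are fetched in a single IO. The small self-contained sketch below (not CarbonData code) shows how a sorted list of projected column indexes can be folded into that range shape:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

// Standalone illustration: folds sorted, distinct column indexes into the contiguous
// [start, end] ranges documented on getDimensionChunks()/getMeasureChunks(), so that
// adjacent column blocks can be fetched in one IO operation.
public final class BlockIndexRanges {

  static int[][] toRanges(int[] sortedColumnIndexes) {
    if (sortedColumnIndexes.length == 0) {
      return new int[0][];
    }
    List<int[]> ranges = new ArrayList<>();
    int start = sortedColumnIndexes[0];
    int prev = start;
    for (int i = 1; i < sortedColumnIndexes.length; i++) {
      int current = sortedColumnIndexes[i];
      if (current != prev + 1) {              // gap found: close the current range
        ranges.add(new int[] { start, prev });
        start = current;
      }
      prev = current;
    }
    ranges.add(new int[] { start, prev });    // close the last open range
    return ranges.toArray(new int[0][]);
  }

  public static void main(String[] args) {
    // Columns 0-2 and 7-8 are contiguous, 12 stands alone -> three ranges, three IO calls.
    System.out.println(Arrays.deepToString(toRanges(new int[] { 0, 1, 2, 7, 8, 12 })));
    // prints [[0, 2], [7, 8], [12, 12]]
  }
}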

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNodeFinder.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNodeFinder.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNodeFinder.java
deleted file mode 100644
index c8edc0f..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/DataRefNodeFinder.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore;
-
-/**
- * Below interface is used to search a block
- */
-public interface DataRefNodeFinder {
-
-  /**
-   * Below method will be used to get the first tentative block which matches with
-   * the search key
-   * @param dataBlocks complete data blocks present
-   * @param searchKey  index key to search for
-   * @return data block
-   */
-  DataRefNode findFirstDataBlock(DataRefNode dataBlocks, IndexKey searchKey);
-
-  /**
-   * Below method will be used to get the last tentative block which matches with
-   * the search key
-   * @param dataBlocks complete data blocks present
-   * @param searchKey  index key to search for
-   * @return data block
-   */
-  DataRefNode findLastDataBlock(DataRefNode dataBlocks, IndexKey searchKey);
-}
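
Taken together with DataRefNode above, the finder bounds a scan to the candidate blocks for a key: findFirstDataBlock() and findLastDataBlock() return the two tentative endpoints, and getNextDataRefNode() walks the chain between them. A hedged sketch of that usage (countCandidateBlocks is a made-up helper, not part of the codebase):

import org.apache.carbondata.core.carbon.datastore.DataRefNode;
import org.apache.carbondata.core.carbon.datastore.DataRefNodeFinder;
import org.apache.carbondata.core.carbon.datastore.IndexKey;

// Illustrative only: bounds a scan to the blocks that may contain the search key, then
// walks the sibling chain exposed by DataRefNode.getNextDataRefNode().
public final class BlockScanSketch {

  static long countCandidateBlocks(DataRefNodeFinder finder, DataRefNode root, IndexKey key) {
    DataRefNode first = finder.findFirstDataBlock(root, key);
    DataRefNode last = finder.findLastDataBlock(root, key);
    if (first == null || last == null) {
      return 0;                                      // defensive: no candidate blocks found
    }
    long count = 0;
    for (DataRefNode node = first; node != null; node = node.getNextDataRefNode()) {
      count++;                                       // a real scanner would read chunks here
      if (node.nodeNumber() == last.nodeNumber()) {  // stop once the last candidate is reached
        break;
      }
    }
    return count;
  }
}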

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/IndexKey.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/IndexKey.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/IndexKey.java
deleted file mode 100644
index cefd32c..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/IndexKey.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore;
-
-/**
- * Index class to store the index of the segment blocklet infos
- */
-public class IndexKey {
-
-  /**
-   * key which is generated from key generator
-   */
-  private byte[] dictionaryKeys;
-
-  /**
-   * key which was not generated using the key generator
-   * <Index of FirstKey (2 bytes)><Index of SecondKey (2 bytes)><Index of NKey (2 bytes)>
-   * <First Key ByteArray><2nd Key ByteArray><N Key ByteArray>
-   */
-  private byte[] noDictionaryKeys;
-
-  public IndexKey(byte[] dictionaryKeys, byte[] noDictionaryKeys) {
-    this.dictionaryKeys = dictionaryKeys;
-    this.noDictionaryKeys = noDictionaryKeys;
-    if (null == dictionaryKeys) {
-      this.dictionaryKeys = new byte[0];
-    }
-    if (null == noDictionaryKeys) {
-      this.noDictionaryKeys = new byte[0];
-    }
-  }
-
-  /**
-   * @return the dictionaryKeys
-   */
-  public byte[] getDictionaryKeys() {
-    return dictionaryKeys;
-  }
-
-  /**
-   * @return the noDictionaryKeys
-   */
-  public byte[] getNoDictionaryKeys() {
-    return noDictionaryKeys;
-  }
-}
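
The comment on noDictionaryKeys above documents a packed layout: one 2-byte entry per key followed by the key byte arrays. The self-contained sketch below writes the 2-byte entries as the start offset of each key inside the packed array; that reading of "Index of FirstKey (2 bytes)" is an assumption, not something this diff confirms.

import java.nio.ByteBuffer;

// Standalone sketch of the packed layout documented on IndexKey.noDictionaryKeys:
// a header of one short per key, followed by the key byte arrays.
public final class NoDictionaryKeyPacker {

  static byte[] pack(byte[][] keys) {
    int headerSize = keys.length * 2;        // one short per no-dictionary key
    int dataSize = 0;
    for (byte[] key : keys) {
      dataSize += key.length;
    }
    ByteBuffer buffer = ByteBuffer.allocate(headerSize + dataSize);
    int offset = headerSize;                 // first key starts right after the header
    for (byte[] key : keys) {
      buffer.putShort((short) offset);       // assumed meaning: start offset of this key
      offset += key.length;
    }
    for (byte[] key : keys) {
      buffer.put(key);                       // key bytes in the same order as the header
    }
    return buffer.array();
  }
}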

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/SegmentTaskIndexStore.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/SegmentTaskIndexStore.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/SegmentTaskIndexStore.java
deleted file mode 100644
index 4d67dd6..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/SegmentTaskIndexStore.java
+++ /dev/null
@@ -1,401 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.core.carbon.datastore;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.concurrent.ConcurrentHashMap;
-
-import org.apache.carbondata.common.logging.LogService;
-import org.apache.carbondata.common.logging.LogServiceFactory;
-import org.apache.carbondata.core.cache.Cache;
-import org.apache.carbondata.core.cache.CarbonLRUCache;
-import org.apache.carbondata.core.carbon.AbsoluteTableIdentifier;
-import org.apache.carbondata.core.carbon.datastore.block.AbstractIndex;
-import org.apache.carbondata.core.carbon.datastore.block.SegmentTaskIndex;
-import org.apache.carbondata.core.carbon.datastore.block.SegmentTaskIndexWrapper;
-import org.apache.carbondata.core.carbon.datastore.block.TableBlockInfo;
-import org.apache.carbondata.core.carbon.datastore.exception.IndexBuilderException;
-import org.apache.carbondata.core.carbon.metadata.blocklet.DataFileFooter;
-import org.apache.carbondata.core.carbon.path.CarbonTablePath;
-import org.apache.carbondata.core.carbon.path.CarbonTablePath.DataFileUtil;
-import org.apache.carbondata.core.update.UpdateVO;
-import org.apache.carbondata.core.updatestatus.SegmentUpdateStatusManager;
-import org.apache.carbondata.core.util.CarbonUtil;
-
-/**
- * Class to handle loading, unloading, clearing and storing of the table
- * blocks
- */
-public class SegmentTaskIndexStore
-    implements Cache<TableSegmentUniqueIdentifier, SegmentTaskIndexWrapper> {
-  private static final LogService LOGGER =
-      LogServiceFactory.getLogService(SegmentTaskIndexStore.class.getName());
-  /**
-   * carbon store path
-   */
-  protected String carbonStorePath;
-  /**
-   * CarbonLRU cache
-   */
-  protected CarbonLRUCache lruCache;
-
-  /**
-   * map of block info to lock object; while loading the btree this will be filled
-   * and removed after loading the tree for that particular block info. This is useful
-   * while loading the tree concurrently, since only a block level lock is applied and
-   * another block can be loaded concurrently
-   */
-  private Map<String, Object> segmentLockMap;
-
-  /**
-   * constructor to initialize the SegmentTaskIndexStore
-   *
-   * @param carbonStorePath
-   * @param lruCache
-   */
-  public SegmentTaskIndexStore(String carbonStorePath, CarbonLRUCache lruCache) {
-    this.carbonStorePath = carbonStorePath;
-    this.lruCache = lruCache;
-    segmentLockMap = new ConcurrentHashMap<String, Object>();
-  }
-
-  @Override
-  public SegmentTaskIndexWrapper get(TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier)
-      throws IOException {
-    SegmentTaskIndexWrapper segmentTaskIndexWrapper = null;
-    try {
-      segmentTaskIndexWrapper =
-          loadAndGetTaskIdToSegmentsMap(tableSegmentUniqueIdentifier.getSegmentToTableBlocksInfos(),
-              tableSegmentUniqueIdentifier.getAbsoluteTableIdentifier(),
-              tableSegmentUniqueIdentifier);
-    } catch (IndexBuilderException e) {
-      throw new IOException(e.getMessage(), e);
-    } catch (Throwable e) {
-      throw new IOException("Problem in loading segment block.", e);
-    }
-    return segmentTaskIndexWrapper;
-  }
-
-  @Override public List<SegmentTaskIndexWrapper> getAll(
-      List<TableSegmentUniqueIdentifier> tableSegmentUniqueIdentifiers) throws IOException {
-    List<SegmentTaskIndexWrapper> segmentTaskIndexWrappers =
-        new ArrayList<>(tableSegmentUniqueIdentifiers.size());
-    try {
-      for (TableSegmentUniqueIdentifier segmentUniqueIdentifier : tableSegmentUniqueIdentifiers) {
-        segmentTaskIndexWrappers.add(get(segmentUniqueIdentifier));
-      }
-    } catch (Throwable e) {
-      for (SegmentTaskIndexWrapper segmentTaskIndexWrapper : segmentTaskIndexWrappers) {
-        segmentTaskIndexWrapper.clear();
-      }
-      throw new IOException("Problem in loading segment blocks.", e);
-    }
-    return segmentTaskIndexWrappers;
-  }
-
-  /**
-   * returns the SegmentTaskIndexWrapper
-   *
-   * @param tableSegmentUniqueIdentifier
-   * @return
-   */
-  @Override public SegmentTaskIndexWrapper getIfPresent(
-      TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier) {
-    SegmentTaskIndexWrapper segmentTaskIndexWrapper = (SegmentTaskIndexWrapper) lruCache
-        .get(tableSegmentUniqueIdentifier.getUniqueTableSegmentIdentifier());
-    if (null != segmentTaskIndexWrapper) {
-      segmentTaskIndexWrapper.incrementAccessCount();
-    }
-    return segmentTaskIndexWrapper;
-  }
-
-  /**
-   * method to invalidate the segment cache for a segment
-   *
-   * @param tableSegmentUniqueIdentifier
-   */
-  @Override public void invalidate(TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier) {
-    lruCache.remove(tableSegmentUniqueIdentifier.getUniqueTableSegmentIdentifier());
-  }
-
-  /**
-   * returns block timestamp value from the given task
-   * @param taskKey
-   * @param listOfUpdatedFactFiles
-   * @return
-   */
-  private String getTimeStampValueFromBlock(String taskKey, List<String> listOfUpdatedFactFiles) {
-    for (String blockName : listOfUpdatedFactFiles) {
-      if (taskKey.equals(CarbonTablePath.DataFileUtil.getTaskNo(blockName))) {
-        blockName = blockName.substring(blockName.lastIndexOf('-') + 1, blockName.lastIndexOf('.'));
-        return blockName;
-      }
-    }
-    return null;
-  }
-
-  /**
-   * Below method will be used to load the segments.
-   * One segment may have multiple tasks, so the table segment will be loaded
-   * based on task id and this will return the map of taskId to table segment
-   * map
-   *
-   * @param segmentToTableBlocksInfos segment id to block info
-   * @param absoluteTableIdentifier   absolute table identifier
-   * @return map of task id to segment mapping
-   * @throws IOException
-   */
-  private SegmentTaskIndexWrapper loadAndGetTaskIdToSegmentsMap(
-      Map<String, List<TableBlockInfo>> segmentToTableBlocksInfos,
-      AbsoluteTableIdentifier absoluteTableIdentifier,
-      TableSegmentUniqueIdentifier tableSegmentUniqueIdentifier) throws IOException {
-    // task id to segment map
-    Iterator<Map.Entry<String, List<TableBlockInfo>>> iteratorOverSegmentBlocksInfos =
-        segmentToTableBlocksInfos.entrySet().iterator();
-    Map<TaskBucketHolder, AbstractIndex> taskIdToSegmentIndexMap = null;
-    SegmentTaskIndexWrapper segmentTaskIndexWrapper = null;
-    SegmentUpdateStatusManager updateStatusManager =
-        new SegmentUpdateStatusManager(absoluteTableIdentifier);
-    String segmentId = null;
-    TaskBucketHolder taskBucketHolder = null;
-    try {
-      while (iteratorOverSegmentBlocksInfos.hasNext()) {
-        // segment id to table block mapping
-        Map.Entry<String, List<TableBlockInfo>> next = iteratorOverSegmentBlocksInfos.next();
-        // group task id to table block info mapping for the segment
-        Map<TaskBucketHolder, List<TableBlockInfo>> taskIdToTableBlockInfoMap =
-            mappedAndGetTaskIdToTableBlockInfo(segmentToTableBlocksInfos);
-        segmentId = next.getKey();
-        // get the existing map of task id to table segment map
-        UpdateVO updateVO = updateStatusManager.getInvalidTimestampRange(segmentId);
-        // check if segment is already loaded; if it is already loaded
-        // there is no need to load the segment block
-        String lruCacheKey = tableSegmentUniqueIdentifier.getUniqueTableSegmentIdentifier();
-        segmentTaskIndexWrapper = (SegmentTaskIndexWrapper) lruCache.get(lruCacheKey);
-        if (segmentTaskIndexWrapper == null || tableSegmentUniqueIdentifier.isSegmentUpdated()) {
-          // get the segment loader lock object; this is to avoid the
-          // same segment getting loaded multiple times
-          // in case of concurrent queries
-          Object segmentLoderLockObject = segmentLockMap.get(lruCacheKey);
-          if (null == segmentLoderLockObject) {
-            segmentLoderLockObject = addAndGetSegmentLock(lruCacheKey);
-          }
-          // acquire lock to load the segment
-          synchronized (segmentLoderLockObject) {
-            segmentTaskIndexWrapper = (SegmentTaskIndexWrapper) lruCache.get(lruCacheKey);
-            if (null == segmentTaskIndexWrapper || tableSegmentUniqueIdentifier
-                .isSegmentUpdated()) {
-              // if the segment is updated then get the existing block task id map details
-              // so that the same can be updated after loading the btree.
-              if (tableSegmentUniqueIdentifier.isSegmentUpdated()
-                  && null != segmentTaskIndexWrapper) {
-                taskIdToSegmentIndexMap = segmentTaskIndexWrapper.getTaskIdToTableSegmentMap();
-              } else {
-                // creating a map of task id to table segment
-                taskIdToSegmentIndexMap = new HashMap<TaskBucketHolder, AbstractIndex>();
-                segmentTaskIndexWrapper = new SegmentTaskIndexWrapper(taskIdToSegmentIndexMap);
-                segmentTaskIndexWrapper.incrementAccessCount();
-              }
-              Iterator<Map.Entry<TaskBucketHolder, List<TableBlockInfo>>> iterator =
-                  taskIdToTableBlockInfoMap.entrySet().iterator();
-              long requiredSize =
-                  calculateRequiredSize(taskIdToTableBlockInfoMap, absoluteTableIdentifier);
-              segmentTaskIndexWrapper
-                  .setMemorySize(requiredSize + segmentTaskIndexWrapper.getMemorySize());
-              boolean isAddedToLruCache =
-                  lruCache.put(lruCacheKey, segmentTaskIndexWrapper, requiredSize);
-              if (isAddedToLruCache) {
-                while (iterator.hasNext()) {
-                  Map.Entry<TaskBucketHolder, List<TableBlockInfo>> taskToBlockInfoList =
-                      iterator.next();
-                  taskBucketHolder = taskToBlockInfoList.getKey();
-                  taskIdToSegmentIndexMap.put(taskBucketHolder,
-                      loadBlocks(taskBucketHolder, taskToBlockInfoList.getValue(),
-                          absoluteTableIdentifier));
-                }
-              } else {
-                throw new IndexBuilderException(
-                    "Cannot load the segment. Not enough space available.");
-              }
-
-              // set the latest timestamp.
-              segmentTaskIndexWrapper
-                  .setRefreshedTimeStamp(updateVO.getCreatedOrUpdatedTimeStamp());
-              // tableSegmentMapTemp.put(next.getKey(), taskIdToSegmentIndexMap);
-              // removing from segment lock map: once the segment is loaded,
-              // if a concurrent query comes for the same segment
-              // it will wait on the lock, and after this the segment will already
-              // be loaded, so the lock is not required; that is why we remove
-              // the lock object as it won't be useful
-              segmentLockMap.remove(lruCacheKey);
-            } else {
-              segmentTaskIndexWrapper.incrementAccessCount();
-            }
-          }
-        } else {
-          segmentTaskIndexWrapper.incrementAccessCount();
-        }
-      }
-    } catch (IndexBuilderException e) {
-      LOGGER.error("Problem while loading the segment");
-      throw e;
-    }
-    return segmentTaskIndexWrapper;
-  }
-
-  private long calculateRequiredSize(
-      Map<TaskBucketHolder, List<TableBlockInfo>> taskIdToTableBlockInfoMap,
-      AbsoluteTableIdentifier absoluteTableIdentifier) {
-    Iterator<Map.Entry<TaskBucketHolder, List<TableBlockInfo>>> iterator =
-        taskIdToTableBlockInfoMap.entrySet().iterator();
-    TaskBucketHolder taskBucketHolder;
-    long driverBTreeSize = 0;
-    while (iterator.hasNext()) {
-      Map.Entry<TaskBucketHolder, List<TableBlockInfo>> taskToBlockInfoList = iterator.next();
-      taskBucketHolder = taskToBlockInfoList.getKey();
-      driverBTreeSize += CarbonUtil
-          .calculateDriverBTreeSize(taskBucketHolder.taskNo, taskBucketHolder.bucketNumber,
-              taskToBlockInfoList.getValue(), absoluteTableIdentifier);
-    }
-    return driverBTreeSize;
-  }
-
-  /**
-   * Below method will be used to get the mapping of task id to all the table block info
-   * belonging to that task id
-   *
-   * @param segmentToTableBlocksInfos segment id to table blocks info map
-   * @return task id to table block info mapping
-   */
-  private Map<TaskBucketHolder, List<TableBlockInfo>> mappedAndGetTaskIdToTableBlockInfo(
-      Map<String, List<TableBlockInfo>> segmentToTableBlocksInfos) {
-    Map<TaskBucketHolder, List<TableBlockInfo>> taskIdToTableBlockInfoMap =
-        new ConcurrentHashMap<>();
-    Iterator<Entry<String, List<TableBlockInfo>>> iterator =
-        segmentToTableBlocksInfos.entrySet().iterator();
-    while (iterator.hasNext()) {
-      Entry<String, List<TableBlockInfo>> next = iterator.next();
-      List<TableBlockInfo> value = next.getValue();
-      for (TableBlockInfo blockInfo : value) {
-        String taskNo = DataFileUtil.getTaskNo(blockInfo.getFilePath());
-        String bucketNo = DataFileUtil.getBucketNo(blockInfo.getFilePath());
-        TaskBucketHolder bucketHolder = new TaskBucketHolder(taskNo, bucketNo);
-        List<TableBlockInfo> list = taskIdToTableBlockInfoMap.get(bucketHolder);
-        if (null == list) {
-          list = new ArrayList<TableBlockInfo>();
-          taskIdToTableBlockInfoMap.put(bucketHolder, list);
-        }
-        list.add(blockInfo);
-      }
-
-    }
-    return taskIdToTableBlockInfoMap;
-  }
-
-  /**
-   * Below method will be used to get the segment level lock object
-   *
-   * @param segmentId
-   * @return lock object
-   */
-  private synchronized Object addAndGetSegmentLock(String segmentId) {
-    // get the segment lock object if it is present then return
-    // otherwise add the new lock and return
-    Object segmentLoderLockObject = segmentLockMap.get(segmentId);
-    if (null == segmentLoderLockObject) {
-      segmentLoderLockObject = new Object();
-      segmentLockMap.put(segmentId, segmentLoderLockObject);
-    }
-    return segmentLoderLockObject;
-  }
-
-  /**
-   * Below method will be used to load the blocks
-   *
-   * @param tableBlockInfoList
-   * @return loaded segment
-   * @throws IOException
-   */
-  private AbstractIndex loadBlocks(TaskBucketHolder taskBucketHolder,
-      List<TableBlockInfo> tableBlockInfoList, AbsoluteTableIdentifier tableIdentifier)
-      throws IOException {
-    // all the blocks of one task id will be loaded together,
-    // so creating a list which will have all the data file metadata of one task
-    List<DataFileFooter> footerList = CarbonUtil
-        .readCarbonIndexFile(taskBucketHolder.taskNo, taskBucketHolder.bucketNumber,
-            tableBlockInfoList, tableIdentifier);
-    AbstractIndex segment = new SegmentTaskIndex();
-    // file path of only the first block is passed, as the table block info paths of the
-    // same task id will all be the same
-    segment.buildIndex(footerList);
-    return segment;
-  }
-
-  /**
-   * The method clears the access count of table segments
-   *
-   * @param tableSegmentUniqueIdentifiers
-   */
-  @Override
-  public void clearAccessCount(List<TableSegmentUniqueIdentifier> tableSegmentUniqueIdentifiers) {
-    for (TableSegmentUniqueIdentifier segmentUniqueIdentifier : tableSegmentUniqueIdentifiers) {
-      SegmentTaskIndexWrapper cacheable = (SegmentTaskIndexWrapper) lruCache
-          .get(segmentUniqueIdentifier.getUniqueTableSegmentIdentifier());
-      cacheable.clear();
-    }
-  }
-
-  public static class TaskBucketHolder implements Serializable {
-
-    public String taskNo;
-
-    public String bucketNumber;
-
-    public TaskBucketHolder(String taskNo, String bucketNumber) {
-      this.taskNo = taskNo;
-      this.bucketNumber = bucketNumber;
-    }
-
-    @Override public boolean equals(Object o) {
-      if (this == o) return true;
-      if (o == null || getClass() != o.getClass()) return false;
-
-      TaskBucketHolder that = (TaskBucketHolder) o;
-
-      if (taskNo != null ? !taskNo.equals(that.taskNo) : that.taskNo != null) return false;
-      return bucketNumber != null ?
-          bucketNumber.equals(that.bucketNumber) :
-          that.bucketNumber == null;
-
-    }
-
-    @Override public int hashCode() {
-      int result = taskNo != null ? taskNo.hashCode() : 0;
-      result = 31 * result + (bucketNumber != null ? bucketNumber.hashCode() : 0);
-      return result;
-    }
-  }
-}
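
The core of loadAndGetTaskIdToSegmentsMap() above is a per-segment double-checked load: probe the LRU cache, fetch (or create) a lock object from segmentLockMap for that cache key, re-check the cache inside the synchronized block, build the btree only if it is still missing, and drop the lock entry once the segment is loaded. The generic sketch below (standalone, not CarbonData code) isolates that pattern:

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;

// Standalone sketch of the per-key double-checked loading pattern used above.
public final class PerKeyLoadingCache<K, V> {

  private final Map<K, V> cache = new ConcurrentHashMap<>();
  private final Map<K, Object> lockMap = new ConcurrentHashMap<>();

  V get(K key, Function<K, V> loader) {
    V value = cache.get(key);
    if (value != null) {
      return value;                              // fast path: already loaded
    }
    Object lock = lockMap.computeIfAbsent(key, k -> new Object());
    synchronized (lock) {
      value = cache.get(key);                    // re-check under the per-key lock
      if (value == null) {
        value = loader.apply(key);               // expensive load (here: building the btree)
        cache.put(key, value);
      }
    }
    lockMap.remove(key);                         // mirrors segmentLockMap.remove(lruCacheKey)
    return value;
  }
}

Keyed locking keeps loads of different segments fully concurrent while serializing only the threads racing on the same segment, which is why the store removes the lock entry as soon as the segment has been loaded.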

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ce09aaaf/core/src/main/java/org/apache/carbondata/core/carbon/datastore/TableSegmentUniqueIdentifier.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/TableSegmentUniqueIdentifier.java b/core/src/main/java/org/apache/carbondata/core/carbon/datastore/TableSegmentUniqueIdentifier.java
deleted file mode 100644
index cacc902..0000000
--- a/core/src/main/java/org/apache/carbondata/core/carbon/datastore/TableSegmentUniqueIdentifier.java
+++ /dev/null
@@ -1,144 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.carbondata.core.carbon.datastore;
-
-import java.util.List;
-import java.util.Map;
-
-import org.apache.carbondata.core.carbon.AbsoluteTableIdentifier;
-import org.apache.carbondata.core.carbon.CarbonTableIdentifier;
-import org.apache.carbondata.core.carbon.datastore.block.TableBlockInfo;
-import org.apache.carbondata.core.constants.CarbonCommonConstants;
-
-/**
- * Class holds the absoluteTableIdentifier and segmentId to uniquely identify a segment
- */
-public class TableSegmentUniqueIdentifier {
-  /**
-   * table fully qualified identifier
-   */
-  private AbsoluteTableIdentifier absoluteTableIdentifier;
-
-  /**
-   * segment to tableBlockInfo map
-   */
-  Map<String, List<TableBlockInfo>> segmentToTableBlocksInfos;
-
-  private String segmentId;
-  private boolean isSegmentUpdated;
-
-  /**
-   * Constructor to initialize the class instance
-   * @param absoluteTableIdentifier
-   * @param segmentId
-   */
-  public TableSegmentUniqueIdentifier(AbsoluteTableIdentifier absoluteTableIdentifier,
-      String segmentId) {
-    this.absoluteTableIdentifier = absoluteTableIdentifier;
-    this.segmentId = segmentId;
-  }
-
-  public TableSegmentUniqueIdentifier(AbsoluteTableIdentifier absoluteTableIdentifier,
-      Map<String, List<TableBlockInfo>> segmentToTableBlocksInfos, String segmentId) {
-    this.absoluteTableIdentifier = absoluteTableIdentifier;
-    this.segmentToTableBlocksInfos = segmentToTableBlocksInfos;
-    this.segmentId = segmentId;
-  }
-
-  /**
-   * returns AbsoluteTableIdentifier
-   * @return
-   */
-  public AbsoluteTableIdentifier getAbsoluteTableIdentifier() {
-    return absoluteTableIdentifier;
-  }
-
-  public void setAbsoluteTableIdentifier(AbsoluteTableIdentifier absoluteTableIdentifier) {
-    this.absoluteTableIdentifier = absoluteTableIdentifier;
-  }
-
-  /**
-   *  returns the segment to tableBlockInfo map
-   * @return
-   */
-  public Map<String, List<TableBlockInfo>> getSegmentToTableBlocksInfos() {
-    return segmentToTableBlocksInfos;
-  }
-
-  /**
-   * set the segment to tableBlockInfo map
-   * @param segmentToTableBlocksInfos
-   */
-  public void setSegmentToTableBlocksInfos(
-      Map<String, List<TableBlockInfo>> segmentToTableBlocksInfos) {
-    this.segmentToTableBlocksInfos = segmentToTableBlocksInfos;
-  }
-
-  public String getSegmentId() {
-    return segmentId;
-  }
-
-  /**
-   * method returns the id that uniquely identifies this table segment
-   *
-   * @return
-   */
-  public String getUniqueTableSegmentIdentifier() {
-    CarbonTableIdentifier carbonTableIdentifier =
-            absoluteTableIdentifier.getCarbonTableIdentifier();
-    return carbonTableIdentifier.getDatabaseName() + CarbonCommonConstants.FILE_SEPARATOR
-            + carbonTableIdentifier.getTableName() + CarbonCommonConstants.UNDERSCORE
-            + carbonTableIdentifier.getTableId() + CarbonCommonConstants.FILE_SEPARATOR + segmentId;
-  }
-  public void setIsSegmentUpdated(boolean isSegmentUpdated) {
-    this.isSegmentUpdated = isSegmentUpdated;
-  }
-
-  public boolean isSegmentUpdated() {
-    return isSegmentUpdated;
-  }
-
-  /**
-   * equals method to compare two objects having same
-   * absoluteIdentifier and segmentId
-   * @param o
-   * @return
-   */
-  @Override public boolean equals(Object o) {
-    if (this == o) return true;
-    if (o == null || getClass() != o.getClass()) return false;
-
-    TableSegmentUniqueIdentifier that = (TableSegmentUniqueIdentifier) o;
-
-    if (!absoluteTableIdentifier.equals(that.absoluteTableIdentifier)) return false;
-    return segmentId.equals(that.segmentId);
-
-  }
-
-  /**
-   * Returns the hashcode for the TableSegmentUniqueIdentifier
-   * @return
-   */
-  @Override public int hashCode() {
-    int result = absoluteTableIdentifier.hashCode();
-    result = 31 * result + segmentId.hashCode();
-    return result;
-  }
-}
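
getUniqueTableSegmentIdentifier() above composes the LRU cache key from the database name, table name, table id and segment id. Assuming the usual values of CarbonCommonConstants.FILE_SEPARATOR ("/") and UNDERSCORE ("_"), the key has the shape sketched below; all concrete values are made up for illustration.

// Sketch of the cache key shape produced by getUniqueTableSegmentIdentifier().
public final class SegmentCacheKeySketch {

  static String uniqueTableSegmentIdentifier(String databaseName, String tableName,
      String tableId, String segmentId) {
    return databaseName + "/" + tableName + "_" + tableId + "/" + segmentId;
  }

  public static void main(String[] args) {
    System.out.println(uniqueTableSegmentIdentifier("default", "sales", "1001", "2"));
    // prints default/sales_1001/2
  }
}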

