carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ravipes...@apache.org
Subject [36/50] [abbrv] carbondata git commit: [CARBONDATA-1983] Remove unnecessary WriterVo object
Date Tue, 09 Jan 2018 04:02:04 GMT
[CARBONDATA-1983] Remove unnecessary WriterVo object

This closes #1761


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/c9e58429
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/c9e58429
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/c9e58429

Branch: refs/heads/branch-1.3
Commit: c9e58429af5f78e4001d90d6e3c90a0d48ed2bcf
Parents: 837fdd2
Author: Jacky Li <jacky.likun@qq.com>
Authored: Thu Jan 4 00:03:39 2018 +0800
Committer: QiangCai <qiangcai@qq.com>
Committed: Fri Jan 5 14:32:55 2018 +0800

----------------------------------------------------------------------
 .../store/CarbonDataWriterFactory.java          |   7 +-
 .../store/CarbonFactDataHandlerColumnar.java    |  29 +--
 .../store/writer/AbstractFactDataWriter.java    |  49 ++--
 .../store/writer/CarbonDataWriterVo.java        | 252 -------------------
 .../writer/v3/CarbonFactDataWriterImplV3.java   |  12 +-
 5 files changed, 35 insertions(+), 314 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/c9e58429/processing/src/main/java/org/apache/carbondata/processing/store/CarbonDataWriterFactory.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonDataWriterFactory.java b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonDataWriterFactory.java
index 7a5cc11..353a56d 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonDataWriterFactory.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonDataWriterFactory.java
@@ -18,7 +18,6 @@
 package org.apache.carbondata.processing.store;
 
 import org.apache.carbondata.core.metadata.ColumnarFormatVersion;
-import org.apache.carbondata.processing.store.writer.CarbonDataWriterVo;
 import org.apache.carbondata.processing.store.writer.CarbonFactDataWriter;
 import org.apache.carbondata.processing.store.writer.v3.CarbonFactDataWriterImplV3;
 
@@ -57,15 +56,15 @@ class CarbonDataWriterFactory {
    * @return writer instance
    */
   public CarbonFactDataWriter getFactDataWriter(final ColumnarFormatVersion version,
-      final CarbonDataWriterVo carbonDataWriterVo) {
+      final CarbonFactDataHandlerModel model) {
     switch (version) {
       case V1:
       case V2:
         throw new UnsupportedOperationException("V1 and V2 CarbonData Writer is not supported");
       case V3:
-        return new CarbonFactDataWriterImplV3(carbonDataWriterVo);
+        return new CarbonFactDataWriterImplV3(model);
       default:
-        return new CarbonFactDataWriterImplV3(carbonDataWriterVo);
+        return new CarbonFactDataWriterImplV3(model);
     }
   }
 

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c9e58429/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
index 2e62d28..06522a4 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/CarbonFactDataHandlerColumnar.java
@@ -50,7 +50,6 @@ import org.apache.carbondata.core.util.CarbonThreadFactory;
 import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.processing.datatypes.GenericDataType;
 import org.apache.carbondata.processing.loading.sort.SortScopeOptions;
-import org.apache.carbondata.processing.store.writer.CarbonDataWriterVo;
 import org.apache.carbondata.processing.store.writer.CarbonFactDataWriter;
 
 /**
@@ -460,33 +459,7 @@ public class CarbonFactDataHandlerColumnar implements CarbonFactHandler {
    * @return data writer instance
    */
   private CarbonFactDataWriter getFactDataWriter() {
-    return CarbonDataWriterFactory.getInstance()
-        .getFactDataWriter(version, getDataWriterVo());
-  }
-
-  /**
-   * Below method will be used to get the writer vo
-   *
-   * @return data writer vo object
-   */
-  private CarbonDataWriterVo getDataWriterVo() {
-    CarbonDataWriterVo carbonDataWriterVo = new CarbonDataWriterVo();
-    carbonDataWriterVo.setStoreLocation(model.getStoreLocation());
-    carbonDataWriterVo.setMeasureCount(model.getMeasureCount());
-    carbonDataWriterVo.setTableName(model.getTableName());
-    carbonDataWriterVo.setNoDictionaryCount(model.getNoDictionaryCount());
-    carbonDataWriterVo.setCarbonDataFileAttributes(model.getCarbonDataFileAttributes());
-    carbonDataWriterVo.setDatabaseName(model.getDatabaseName());
-    carbonDataWriterVo.setWrapperColumnSchemaList(model.getWrapperColumnSchema());
-    carbonDataWriterVo.setCarbonDataDirectoryPath(model.getCarbonDataDirectoryPath());
-    carbonDataWriterVo.setColCardinality(model.getColCardinality());
-    carbonDataWriterVo.setSegmentProperties(model.getSegmentProperties());
-    carbonDataWriterVo.setTableBlocksize(model.getBlockSizeInMB());
-    carbonDataWriterVo.setBucketNumber(model.getBucketId());
-    carbonDataWriterVo.setTaskExtension(model.getTaskExtension());
-    carbonDataWriterVo.setSchemaUpdatedTimeStamp(model.getSchemaUpdatedTimeStamp());
-    carbonDataWriterVo.setListener(model.getDataMapWriterlistener());
-    return carbonDataWriterVo;
+    return CarbonDataWriterFactory.getInstance().getFactDataWriter(version, model);
   }
 
   /**

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c9e58429/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
index 855ec03..9d55d30 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/AbstractFactDataWriter.java
@@ -60,6 +60,7 @@ import org.apache.carbondata.format.BlockIndex;
 import org.apache.carbondata.format.BlockletInfo3;
 import org.apache.carbondata.format.IndexHeader;
 import org.apache.carbondata.processing.datamap.DataMapWriterListener;
+import org.apache.carbondata.processing.store.CarbonFactDataHandlerModel;
 
 import org.apache.commons.lang3.ArrayUtils;
 import org.apache.hadoop.io.IOUtils;
@@ -108,7 +109,7 @@ public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
    */
   protected List<org.apache.carbondata.format.ColumnSchema> thriftColumnSchemaList;
   protected NumberCompressor numberCompressor;
-  protected CarbonDataWriterVo dataWriterVo;
+  protected CarbonFactDataHandlerModel model;
   protected List<List<Long>> dataChunksOffsets;
   protected List<List<Short>> dataChunksLength;
   /**
@@ -155,14 +156,14 @@ public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
    */
   protected DataMapWriterListener listener;
 
-  public AbstractFactDataWriter(CarbonDataWriterVo dataWriterVo) {
-    this.dataWriterVo = dataWriterVo;
+  public AbstractFactDataWriter(CarbonFactDataHandlerModel model) {
+    this.model = model;
     blockIndexInfoList = new ArrayList<>();
     // get max file size;
     CarbonProperties propInstance = CarbonProperties.getInstance();
     // if blocksize=2048, then 2048*1024*1024 will beyond the range of Int
     this.fileSizeInBytes =
-        (long) dataWriterVo.getTableBlocksize() * CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR
+        (long) this.model.getBlockSizeInMB() * CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR
             * CarbonCommonConstants.BYTE_TO_KB_CONVERSION_FACTOR;
 
     // size reserved in one file for writing block meta data. It will be in percentage
@@ -175,26 +176,26 @@ public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
         blockSizeThreshold);
 
     this.executorService = Executors.newFixedThreadPool(1,
-        new CarbonThreadFactory("LocalToHDFSCopyPool:" + dataWriterVo.getTableName()));
+        new CarbonThreadFactory("LocalToHDFSCopyPool:" + this.model.getTableName()));
     executorServiceSubmitList = new ArrayList<>(CarbonCommonConstants.DEFAULT_COLLECTION_SIZE);
     // in case of compaction we will pass the cardinality.
-    this.localCardinality = dataWriterVo.getColCardinality();
+    this.localCardinality = this.model.getColCardinality();
 
     //TODO: We should delete the levelmetadata file after reading here.
     // so only data loading flow will need to read from cardinality file.
     if (null == this.localCardinality) {
       this.localCardinality = CarbonMergerUtil
-          .getCardinalityFromLevelMetadata(dataWriterVo.getStoreLocation(),
-              dataWriterVo.getTableName());
+          .getCardinalityFromLevelMetadata(this.model.getStoreLocation(),
+              this.model.getTableName());
       List<Integer> cardinalityList = new ArrayList<Integer>();
       thriftColumnSchemaList = getColumnSchemaListAndCardinality(cardinalityList, localCardinality,
-          dataWriterVo.getWrapperColumnSchemaList());
+          this.model.getWrapperColumnSchema());
       localCardinality =
           ArrayUtils.toPrimitive(cardinalityList.toArray(new Integer[cardinalityList.size()]));
     } else { // for compaction case
       List<Integer> cardinalityList = new ArrayList<Integer>();
       thriftColumnSchemaList = getColumnSchemaListAndCardinality(cardinalityList, localCardinality,
-          dataWriterVo.getWrapperColumnSchemaList());
+          this.model.getWrapperColumnSchema());
     }
     this.numberCompressor = new NumberCompressor(Integer.parseInt(CarbonProperties.getInstance()
         .getProperty(CarbonCommonConstants.BLOCKLET_SIZE,
@@ -203,7 +204,7 @@ public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
     this.dataChunksLength = new ArrayList<>();
     blockletMetadata = new ArrayList<BlockletInfo3>();
     blockletIndex = new ArrayList<>();
-    listener = dataWriterVo.getListener();
+    listener = this.model.getDataMapWriterlistener();
   }
 
   /**
@@ -308,14 +309,14 @@ public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
     initFileCount();
 
     //each time we initialize writer, we choose a local temp location randomly
-    String[] tempFileLocations = dataWriterVo.getStoreLocation();
+    String[] tempFileLocations = model.getStoreLocation();
     String chosenTempLocation = tempFileLocations[new Random().nextInt(tempFileLocations.length)];
     LOGGER.info("Randomly choose factdata temp location: " + chosenTempLocation);
 
     this.carbonDataFileName = CarbonTablePath
-        .getCarbonDataFileName(fileCount, dataWriterVo.getCarbonDataFileAttributes().getTaskId(),
-            dataWriterVo.getBucketNumber(), dataWriterVo.getTaskExtension(),
-            "" + dataWriterVo.getCarbonDataFileAttributes().getFactTimeStamp());
+        .getCarbonDataFileName(fileCount, model.getCarbonDataFileAttributes().getTaskId(),
+            model.getBucketId(), model.getTaskExtension(),
+            "" + model.getCarbonDataFileAttributes().getFactTimeStamp());
     this.carbonDataFileTempPath = chosenTempLocation + File.separator
         + carbonDataFileName + CarbonCommonConstants.FILE_INPROGRESS_STATUS;
     this.fileCount++;
@@ -334,7 +335,7 @@ public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
     int fileInitialCount = 0;
     FileFilter fileFilter = new FileFilter() {
       @Override public boolean accept(File pathVal) {
-        if (!pathVal.isDirectory() && pathVal.getName().startsWith(dataWriterVo.getTableName())
+        if (!pathVal.isDirectory() && pathVal.getName().startsWith(model.getTableName())
             && pathVal.getName().contains(CarbonCommonConstants.FACT_FILE_EXT)) {
           return true;
         }
@@ -343,7 +344,7 @@ public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
     };
 
     List<File> dataFileList = new ArrayList<File>();
-    for (String tempLoc : dataWriterVo.getStoreLocation()) {
+    for (String tempLoc : model.getStoreLocation()) {
       File[] subFiles = new File(tempLoc).listFiles(fileFilter);
       if (null != subFiles && subFiles.length > 0) {
         dataFileList.addAll(Arrays.asList(subFiles));
@@ -421,18 +422,18 @@ public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
   protected void writeIndexFile() throws IOException, CarbonDataWriterException {
     // get the header
     IndexHeader indexHeader = CarbonMetadataUtil
-        .getIndexHeader(localCardinality, thriftColumnSchemaList, dataWriterVo.getBucketNumber());
+        .getIndexHeader(localCardinality, thriftColumnSchemaList, model.getBucketId());
     // get the block index info thrift
     List<BlockIndex> blockIndexThrift = CarbonMetadataUtil.getBlockIndexInfo(blockIndexInfoList);
     // randomly choose a temp location for index file
-    String[] tempLocations = dataWriterVo.getStoreLocation();
+    String[] tempLocations = model.getStoreLocation();
     String chosenTempLocation = tempLocations[new Random().nextInt(tempLocations.length)];
     LOGGER.info("Randomly choose index file location: " + chosenTempLocation);
 
     String fileName = chosenTempLocation + File.separator + CarbonTablePath
-        .getCarbonIndexFileName(dataWriterVo.getCarbonDataFileAttributes().getTaskId(),
-            dataWriterVo.getBucketNumber(), dataWriterVo.getTaskExtension(),
-            "" + dataWriterVo.getCarbonDataFileAttributes().getFactTimeStamp());
+        .getCarbonIndexFileName(model.getCarbonDataFileAttributes().getTaskId(),
+            model.getBucketId(), model.getTaskExtension(),
+            "" + model.getCarbonDataFileAttributes().getFactTimeStamp());
     CarbonIndexFileWriter writer = new CarbonIndexFileWriter();
     // open file
     writer.openThriftWriter(fileName);
@@ -494,11 +495,11 @@ public abstract class AbstractFactDataWriter implements CarbonFactDataWriter {
   protected void copyCarbonDataFileToCarbonStorePath(String localFileName)
       throws CarbonDataWriterException {
     long copyStartTime = System.currentTimeMillis();
-    LOGGER.info("Copying " + localFileName + " --> " + dataWriterVo.getCarbonDataDirectoryPath());
+    LOGGER.info("Copying " + localFileName + " --> " + model.getCarbonDataDirectoryPath());
     try {
       CarbonFile localCarbonFile =
           FileFactory.getCarbonFile(localFileName, FileFactory.getFileType(localFileName));
-      String carbonFilePath = dataWriterVo.getCarbonDataDirectoryPath() + localFileName
+      String carbonFilePath = model.getCarbonDataDirectoryPath() + localFileName
           .substring(localFileName.lastIndexOf(File.separator));
       copyLocalFileToCarbonStore(carbonFilePath, localFileName,
           CarbonCommonConstants.BYTEBUFFER_SIZE,

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c9e58429/processing/src/main/java/org/apache/carbondata/processing/store/writer/CarbonDataWriterVo.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/CarbonDataWriterVo.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/CarbonDataWriterVo.java
deleted file mode 100644
index 79cdd95..0000000
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/CarbonDataWriterVo.java
+++ /dev/null
@@ -1,252 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.carbondata.processing.store.writer;
-
-import java.util.List;
-
-import org.apache.carbondata.core.datastore.block.SegmentProperties;
-import org.apache.carbondata.core.metadata.schema.table.column.ColumnSchema;
-import org.apache.carbondata.processing.datamap.DataMapWriterListener;
-import org.apache.carbondata.processing.store.CarbonDataFileAttributes;
-
-/**
- * Value object for writing the data
- */
-public class CarbonDataWriterVo {
-
-  private String[] storeLocation;
-
-  private int measureCount;
-
-  private String tableName;
-
-  private int NoDictionaryCount;
-
-  private CarbonDataFileAttributes carbonDataFileAttributes;
-
-  private String databaseName;
-
-  private List<ColumnSchema> wrapperColumnSchemaList;
-
-  private String carbonDataDirectoryPath;
-
-  private int[] colCardinality;
-
-  private SegmentProperties segmentProperties;
-
-  private int tableBlocksize;
-
-  private int bucketNumber;
-
-  private long schemaUpdatedTimeStamp;
-
-  private int taskExtension;
-
-  private DataMapWriterListener listener;
-
-  /**
-   * @return the storeLocation
-   */
-  public String[] getStoreLocation() {
-    return storeLocation;
-  }
-
-  /**
-   * @param storeLocation the storeLocation to set
-   */
-  public void setStoreLocation(String[] storeLocation) {
-    this.storeLocation = storeLocation;
-  }
-
-  /**
-   * @return the measureCount
-   */
-  public int getMeasureCount() {
-    return measureCount;
-  }
-
-  /**
-   * @param measureCount the measureCount to set
-   */
-  public void setMeasureCount(int measureCount) {
-    this.measureCount = measureCount;
-  }
-
-  /**
-   * @return the tableName
-   */
-  public String getTableName() {
-    return tableName;
-  }
-
-  /**
-   * @param tableName the tableName to set
-   */
-  public void setTableName(String tableName) {
-    this.tableName = tableName;
-  }
-
-  /**
-   * @return the noDictionaryCount
-   */
-  public int getNoDictionaryCount() {
-    return NoDictionaryCount;
-  }
-
-  /**
-   * @param noDictionaryCount the noDictionaryCount to set
-   */
-  public void setNoDictionaryCount(int noDictionaryCount) {
-    NoDictionaryCount = noDictionaryCount;
-  }
-
-  /**
-   * @return the carbonDataFileAttributes
-   */
-  public CarbonDataFileAttributes getCarbonDataFileAttributes() {
-    return carbonDataFileAttributes;
-  }
-
-  /**
-   * @param carbonDataFileAttributes the carbonDataFileAttributes to set
-   */
-  public void setCarbonDataFileAttributes(CarbonDataFileAttributes carbonDataFileAttributes) {
-    this.carbonDataFileAttributes = carbonDataFileAttributes;
-  }
-
-  /**
-   * @return the databaseName
-   */
-  public String getDatabaseName() {
-    return databaseName;
-  }
-
-  /**
-   * @param databaseName the databaseName to set
-   */
-  public void setDatabaseName(String databaseName) {
-    this.databaseName = databaseName;
-  }
-
-  /**
-   * @return the wrapperColumnSchemaList
-   */
-  public List<ColumnSchema> getWrapperColumnSchemaList() {
-    return wrapperColumnSchemaList;
-  }
-
-  /**
-   * @param wrapperColumnSchemaList the wrapperColumnSchemaList to set
-   */
-  public void setWrapperColumnSchemaList(List<ColumnSchema> wrapperColumnSchemaList) {
-    this.wrapperColumnSchemaList = wrapperColumnSchemaList;
-  }
-
-  /**
-   * @return the carbonDataDirectoryPath
-   */
-  public String getCarbonDataDirectoryPath() {
-    return carbonDataDirectoryPath;
-  }
-
-  /**
-   * @param carbonDataDirectoryPath the carbonDataDirectoryPath to set
-   */
-  public void setCarbonDataDirectoryPath(String carbonDataDirectoryPath) {
-    this.carbonDataDirectoryPath = carbonDataDirectoryPath;
-  }
-
-  /**
-   * @return the colCardinality
-   */
-  public int[] getColCardinality() {
-    return colCardinality;
-  }
-
-  /**
-   * @param colCardinality the colCardinality to set
-   */
-  public void setColCardinality(int[] colCardinality) {
-    this.colCardinality = colCardinality;
-  }
-
-  /**
-   * @return the segmentProperties
-   */
-  public SegmentProperties getSegmentProperties() {
-    return segmentProperties;
-  }
-
-  /**
-   * @param segmentProperties the segmentProperties to set
-   */
-  public void setSegmentProperties(SegmentProperties segmentProperties) {
-    this.segmentProperties = segmentProperties;
-  }
-
-  /**
-   * @return the tableBlocksize
-   */
-  public int getTableBlocksize() {
-    return tableBlocksize;
-  }
-
-  /**
-   * @param tableBlocksize the tableBlocksize to set
-   */
-  public void setTableBlocksize(int tableBlocksize) {
-    this.tableBlocksize = tableBlocksize;
-  }
-
-  public int getBucketNumber() {
-    return bucketNumber;
-  }
-
-  public void setBucketNumber(int bucketNumber) {
-    this.bucketNumber = bucketNumber;
-  }
-
-  /**
-   * @return
-   */
-  public long getSchemaUpdatedTimeStamp() {
-    return schemaUpdatedTimeStamp;
-  }
-
-  /**
-   * @param schemaUpdatedTimeStamp
-   */
-  public void setSchemaUpdatedTimeStamp(long schemaUpdatedTimeStamp) {
-    this.schemaUpdatedTimeStamp = schemaUpdatedTimeStamp;
-  }
-
-  public int getTaskExtension() {
-    return taskExtension;
-  }
-
-  public void setTaskExtension(int taskExtension) {
-    this.taskExtension = taskExtension;
-  }
-
-  public void setListener(DataMapWriterListener listener) {
-    this.listener = listener;
-  }
-
-  public DataMapWriterListener getListener() {
-    return listener;
-  }
-}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/c9e58429/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
index d8ae8ff..ddf444d 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/writer/v3/CarbonFactDataWriterImplV3.java
@@ -39,9 +39,9 @@ import org.apache.carbondata.core.util.CarbonUtil;
 import org.apache.carbondata.core.util.DataFileFooterConverterV3;
 import org.apache.carbondata.format.BlockletInfo3;
 import org.apache.carbondata.format.FileFooter3;
+import org.apache.carbondata.processing.store.CarbonFactDataHandlerModel;
 import org.apache.carbondata.processing.store.TablePage;
 import org.apache.carbondata.processing.store.writer.AbstractFactDataWriter;
-import org.apache.carbondata.processing.store.writer.CarbonDataWriterVo;
 
 /**
  * Below class will be used to write the data in V3 format
@@ -65,8 +65,8 @@ public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter {
    */
   private long blockletSizeThreshold;
 
-  public CarbonFactDataWriterImplV3(CarbonDataWriterVo dataWriterVo) {
-    super(dataWriterVo);
+  public CarbonFactDataWriterImplV3(CarbonFactDataHandlerModel model) {
+    super(model);
     blockletSizeThreshold = Long.parseLong(CarbonProperties.getInstance()
         .getProperty(CarbonV3DataFormatConstants.BLOCKLET_SIZE_IN_MB,
             CarbonV3DataFormatConstants.BLOCKLET_SIZE_IN_MB_DEFAULT_VALUE))
@@ -227,7 +227,7 @@ public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter {
   private void writeHeaderToFile(FileChannel channel) throws IOException {
     byte[] fileHeader = CarbonUtil.getByteArray(
         CarbonMetadataUtil.getFileHeader(
-            true, thriftColumnSchemaList, dataWriterVo.getSchemaUpdatedTimeStamp()));
+            true, thriftColumnSchemaList, model.getSchemaUpdatedTimeStamp()));
     ByteBuffer buffer = ByteBuffer.wrap(fileHeader);
     channel.write(buffer);
   }
@@ -292,7 +292,7 @@ public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter {
     measureOffset = offset;
     blockletIndex.add(
         CarbonMetadataUtil.getBlockletIndex(
-            encodedTablePages, dataWriterVo.getSegmentProperties().getMeasures()));
+            encodedTablePages, model.getSegmentProperties().getMeasures()));
     BlockletInfo3 blockletInfo3 =
         new BlockletInfo3(numberOfRows, currentDataChunksOffset, currentDataChunksLength,
             dimensionOffset, measureOffset, blockletDataHolder.getEncodedTablePages().size());
@@ -314,7 +314,7 @@ public class CarbonFactDataWriterImplV3 extends AbstractFactDataWriter {
     for (org.apache.carbondata.format.BlockletIndex index : blockletIndex) {
       BlockletInfo3 blockletInfo3 = blockletMetadata.get(i);
       BlockletInfo blockletInfo = converterV3.getBlockletInfo(blockletInfo3,
-          dataWriterVo.getSegmentProperties().getDimensions().size());
+          model.getSegmentProperties().getDimensions().size());
       BlockletBTreeIndex bTreeIndex = new BlockletBTreeIndex(index.b_tree_index.getStart_key(),
           index.b_tree_index.getEnd_key());
       BlockletMinMaxIndex minMaxIndex = new BlockletMinMaxIndex();


Mime
View raw message