carbondata-commits mailing list archives

From jack...@apache.org
Subject [37/38] incubator-carbondata git commit: reuse test case for integration module
Date Sat, 07 Jan 2017 16:37:11 GMT
reuse test case for integration module

fix comments


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/af2f204e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/af2f204e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/af2f204e

Branch: refs/heads/master
Commit: af2f204e4fbc38b973a26106879c92c0618fba02
Parents: b0750c1
Author: QiangCai <qiangcai@qq.com>
Authored: Thu Dec 29 22:43:29 2016 +0800
Committer: jackylk <jacky.likun@huawei.com>
Committed: Sun Jan 8 00:35:04 2017 +0800

----------------------------------------------------------------------
 .../scan/expression/ExpressionResult.java       |    3 +
 integration/spark-common-test/pom.xml           |  232 ++++
 .../spark/load/CarbonLoaderUtilTest.java        |  419 +++++++
 .../validation/FileFooterValidator.java         |  157 +++
 .../src/test/resources/100_olap.csv             |   99 ++
 .../src/test/resources/10dim_4msr.csv           | 1000 +++++++++++++++
 .../src/test/resources/IUD/T_Hive1.csv          |   10 +
 .../src/test/resources/IUD/comp1.csv            |   11 +
 .../src/test/resources/IUD/comp2.csv            |   11 +
 .../src/test/resources/IUD/comp3.csv            |   11 +
 .../src/test/resources/IUD/comp4.csv            |   11 +
 .../src/test/resources/IUD/dest.csv             |    6 +
 .../src/test/resources/IUD/other.csv            |    3 +
 .../src/test/resources/IUD/sample.csv           |    4 +
 .../src/test/resources/IUD/sample_updated.csv   |    2 +
 .../src/test/resources/IUD/source2.csv          |    3 +
 .../src/test/resources/IUD/source3.csv          |    7 +
 .../src/test/resources/IUD/update01.csv         |    6 +
 .../src/test/resources/OLDFORMATTABLE.csv       |   34 +
 .../src/test/resources/OLDFORMATTABLEHIVE.csv   |   33 +
 .../test/resources/Test_Data1_Logrithmic.csv    |    3 +
 .../src/test/resources/alldatatypescube.xml     |  109 ++
 .../20160423/1400_1405/complex.dictionary       |   20 +
 .../sample/20160423/1400_1405/sample.dictionary |    9 +
 .../src/test/resources/array1.csv               |    2 +
 .../src/test/resources/arrayColumnEmpty.csv     |   21 +
 .../src/test/resources/avgTest.csv              |   16 +
 .../test/resources/badrecords/datasample.csv    |    7 +
 .../badrecords/emptyTimeStampValue.csv          |    8 +
 .../test/resources/badrecords/emptyValues.csv   |    8 +
 .../badrecords/insufficientColumns.csv          |    4 +
 .../resources/badrecords/seriazableValue.csv    |    3 +
 .../src/test/resources/bigIntData.csv           |   14 +
 .../src/test/resources/bigIntDataWithHeader.csv |   13 +
 .../test/resources/bigIntDataWithoutHeader.csv  |   12 +
 .../src/test/resources/big_int_Decimal.csv      |    3 +
 .../src/test/resources/channelsId.csv           |   10 +
 .../src/test/resources/character_carbon.csv     |   33 +
 .../src/test/resources/character_hive.csv       |   32 +
 .../test/resources/columndictionary/country.csv |    5 +
 .../test/resources/columndictionary/name.csv    |   10 +
 .../src/test/resources/comment.csv              |    5 +
 .../test/resources/compaction/compaction1.csv   |    6 +
 .../compaction/compaction1_forhive.csv          |    5 +
 .../test/resources/compaction/compaction2.csv   |    6 +
 .../test/resources/compaction/compaction3.csv   |    6 +
 .../resources/compaction/compactioncard2.csv    |  257 ++++
 .../compaction/compactioncard2_forhive.csv      |  256 ++++
 .../src/test/resources/complexTypeDecimal.csv   |    9 +
 .../test/resources/complexTypeDecimalNested.csv |    9 +
 .../resources/complexTypeDecimalNestedHive.csv  |    8 +
 .../src/test/resources/complexdata.csv          |  100 ++
 .../src/test/resources/complexdata1.csv         |   54 +
 .../src/test/resources/complexdata2.csv         |   46 +
 .../src/test/resources/complexdatareordered.csv |   10 +
 .../test/resources/complexdatastructextra.csv   |   10 +
 .../complextypediffentcolheaderorder.csv        |  100 ++
 .../src/test/resources/complextypesample.csv    |   50 +
 .../complextypespecialchardelimiter.csv         |   50 +
 .../src/test/resources/data.csv                 |   11 +
 .../src/test/resources/data2.csv                |    4 +
 .../src/test/resources/data2_DiffTimeFormat.csv |    4 +
 .../src/test/resources/dataDiff.csv             | 1001 +++++++++++++++
 .../src/test/resources/dataIncrement.csv        |   21 +
 .../src/test/resources/dataWithEmptyRows.csv    |    2 +
 .../test/resources/dataWithNullFirstLine.csv    |   11 +
 .../src/test/resources/dataWithSingleQuote.csv  |    7 +
 .../src/test/resources/data_alltypes.csv        |   10 +
 .../src/test/resources/data_withCAPSHeader.csv  |    3 +
 .../src/test/resources/data_withMixedHeader.csv |    3 +
 .../src/test/resources/datadelimiter.csv        |   11 +
 .../src/test/resources/datanullmeasurecol.csv   |    3 +
 .../src/test/resources/dataretention1.csv       |   11 +
 .../src/test/resources/dataretention2.csv       |   11 +
 .../src/test/resources/dataretention3.csv       |   11 +
 .../src/test/resources/datasample.csv           |    4 +
 .../src/test/resources/datasamplecomplex.csv    |    2 +
 .../src/test/resources/datasamplefordate.csv    |    4 +
 .../src/test/resources/datasamplenull.csv       |    3 +
 .../src/test/resources/datasingleCol.csv        |    3 +
 .../src/test/resources/datasingleComplexCol.csv |    3 +
 .../datawithNegeativewithoutHeader.csv          |    4 +
 .../test/resources/datawithNegtiveNumber.csv    |    5 +
 .../src/test/resources/datawithbackslash.csv    |   11 +
 .../src/test/resources/datawithblanklines.csv   |   19 +
 .../resources/datawithcomplexspecialchar.csv    |  151 +++
 .../test/resources/datawithescapecharacter.csv  |   22 +
 .../src/test/resources/datawithmaxinteger.csv   |   12 +
 .../test/resources/datawithmaxmininteger.csv    |   13 +
 .../src/test/resources/datawithmininteger.csv   |   12 +
 .../src/test/resources/datawithnullmeasure.csv  |    5 +
 .../src/test/resources/datawithnullmsrs.csv     |   11 +
 .../src/test/resources/datawithoutheader.csv    |   10 +
 .../test/resources/datawithspecialcharacter.csv |   38 +
 .../resources/decimalBoundaryDataCarbon.csv     |   12 +
 .../test/resources/decimalBoundaryDataHive.csv  |   11 +
 .../test/resources/decimalDataWithHeader.csv    |   13 +
 .../test/resources/decimalDataWithoutHeader.csv |   12 +
 .../src/test/resources/deviceInformationId.csv  |   48 +
 .../src/test/resources/deviceInformationId2.csv |    6 +
 .../src/test/resources/dict.txt                 |    1 +
 .../src/test/resources/dimSample.csv            |   21 +
 .../src/test/resources/dimTableSample.csv       |    4 +
 .../src/test/resources/emptyDimensionData.csv   |   21 +
 .../test/resources/emptyDimensionDataHive.csv   |   20 +
 .../resources/emptyrow/csvwithonlyspacechar.csv |    1 +
 .../src/test/resources/emptyrow/emptyRows.csv   |    7 +
 .../src/test/resources/example-data.csv         | 1001 +++++++++++++++
 .../src/test/resources/filter/datagrtlrt.csv    |    3 +
 .../src/test/resources/filter/emp2.csv          |    9 +
 .../src/test/resources/filter/emp2allnull.csv   |    9 +
 .../src/test/resources/filter/emp2nonull.csv    |    8 +
 .../src/test/resources/integertypecube.xml      |   55 +
 .../src/test/resources/invalidMeasures.csv      |    3 +
 .../src/test/resources/join/emp.csv             |    1 +
 .../src/test/resources/join/employee.csv        | 1000 +++++++++++++++
 .../src/test/resources/join/mgr.csv             |    1 +
 .../src/test/resources/join/mobile.csv          | 1000 +++++++++++++++
 .../test/resources/lessthandatacolumndata.csv   |   11 +
 .../resources/loadMultiFiles/.invisibilityfile  |    3 +
 .../src/test/resources/loadMultiFiles/_SUCCESS  |    0
 .../src/test/resources/loadMultiFiles/data.csv  |    4 +
 .../test/resources/loadMultiFiles/emptyfile.csv |    0
 .../loadMultiFiles/nestedfolder1/data.csv       |    2 +
 .../loadMultiFiles/nestedfolder1/data1.csv      |    3 +
 .../nestedfolder1/nestedfolder2/data.csv        |    3 +
 .../src/test/resources/loadMultiFiles/non-csv   |    3 +
 .../resources/locationInfoActiveCountry.csv     |    9 +
 .../src/test/resources/mac.csv                  |   39 +
 .../src/test/resources/mobileimei.csv           |   24 +
 .../src/test/resources/noneCsvFormat.cs         |    4 +
 .../src/test/resources/nullSample.csv           |    0
 .../test/resources/nullandnonparsableValue.csv  |    3 +
 .../src/test/resources/nullmeasurevalue.csv     |   11 +
 .../test/resources/nullvalueserialization.csv   |    2 +
 .../src/test/resources/numerictypecube.xml      |   41 +
 .../spark-common-test/src/test/resources/sample |    5 +
 .../src/test/resources/sample.csv               |    5 +
 .../src/test/resources/sample.csv.bz2           |  Bin 0 -> 114 bytes
 .../src/test/resources/sample.csv.gz            |  Bin 0 -> 106 bytes
 .../src/test/resources/sampleComplex.csv        |    4 +
 .../test/resources/sample_withDelimiter017.csv  |    5 +
 .../src/test/resources/shortolap.csv            |    5 +
 .../src/test/resources/stringtypecube.xml       |   55 +
 .../src/test/resources/struct_all.csv           |    4 +
 .../src/test/resources/structusingstruct.csv    |    2 +
 .../src/test/resources/test.json                |    3 +
 .../src/test/resources/timeStampFormatData1.csv |   12 +
 .../src/test/resources/timeStampFormatData2.csv |   11 +
 .../src/test/resources/timestampdata.csv        |    2 +
 .../src/test/resources/timestamptypecube.xml    |   50 +
 .../test/resources/verticalDelimitedData.csv    |   12 +
 .../src/test/resources/windows.csv              | 1000 +++++++++++++++
 .../aggquery/IntegerDataTypeTestCase.scala      |   49 +
 .../spark/testsuite/bigdecimal/TestBigInt.scala |   92 ++
 .../TestDimensionWithDecimalDataType.scala      |   60 +
 .../complexType/TestCreateTableWithDouble.scala |   95 ++
 .../MultiFilesDataLoagdingTestCase.scala        |   51 +
 .../dataload/TestLoadDataGeneral.scala          |  139 +++
 .../TestLoadDataWithAutoLoadMerge.scala         |   59 +
 .../dataload/TestLoadDataWithBlankLine.scala    |   65 +
 .../TestLoadDataWithEmptyArrayColumns.scala     |   66 +
 .../dataload/TestLoadDataWithJunkChars.scala    |   60 +
 .../TestLoadDataWithMaxMinInteger.scala         |   96 ++
 .../dataload/TestLoadDataWithNullMeasures.scala |   51 +
 .../emptyrow/TestCSVHavingOnlySpaceChar.scala   |   62 +
 .../testsuite/emptyrow/TestEmptyRows.scala      |   78 ++
 .../aggquery/AggregateQueryTestCase.scala       |   44 +
 .../AllDataTypesTestCaseAggregate.scala         |  111 ++
 .../aggquery/AverageQueryTestCase.scala         |  112 ++
 .../AllDataTypesTestCaseAggregate.scala         | 1163 +++++++++++++++++
 .../TestQueryWithOldCarbonDataFile.scala        |   69 ++
 .../allqueries/TestQueryWithoutDataLoad.scala   |   62 +
 .../allqueries/TestTableNameHasDbName.scala     |   53 +
 .../badrecordloger/BadRecordLoggerTest.scala    |  256 ++++
 .../testsuite/bigdecimal/TestAvgForBigInt.scala |   54 +
 .../testsuite/bigdecimal/TestBigDecimal.scala   |  203 +++
 .../bigdecimal/TestNullAndEmptyFields.scala     |  120 ++
 .../blockprune/BlockPruneQueryTestCase.scala    |  113 ++
 .../DataCompactionBlockletBoundryTest.scala     |   97 ++
 .../DataCompactionBoundaryConditionsTest.scala  |   90 ++
 .../DataCompactionCardinalityBoundryTest.scala  |  126 ++
 .../datacompaction/DataCompactionLockTest.scala |  127 ++
 .../MajorCompactionIgnoreInMinorTest.scala      |  161 +++
 .../MajorCompactionStopsAfterCompaction.scala   |  141 +++
 .../TestDataLoadPartitionCoalescer.scala        |  167 +++
 .../TestDataLoadWithColumnsMoreThanSchema.scala |  139 +++
 .../TestDataWithDicExcludeAndInclude.scala      |  100 ++
 .../testsuite/dataload/TestLoadDataFrame.scala  |   94 ++
 .../dataload/TestLoadDataUseAllDictionary.scala |   56 +
 .../TestLoadDataWithDiffTimestampFormat.scala   |  138 +++
 .../TestLoadDataWithFileHeaderException.scala   |   66 +
 .../dataload/TestLoadDataWithHiveSyntax.scala   |  692 +++++++++++
 ...ataWithMalformedCarbonCommandException.scala |  162 +++
 .../dataload/TestLoadDataWithNoMeasure.scala    |  115 ++
 .../TestLoadDataWithNotProperInputFile.scala    |   74 ++
 .../dataload/TestLoadTblNameIsKeyword.scala     |   92 ++
 .../dataload/TestTableLevelBlockSize.scala      |  122 ++
 .../DataRetentionConcurrencyTestCase.scala      |  106 ++
 .../dataretention/DataRetentionTestCase.scala   |  326 +++++
 .../deleteTable/TestDeleteTableNewDDL.scala     |  259 ++++
 .../describeTable/TestDescribeTable.scala       |   53 +
 .../detailquery/AllDataTypesTestCase.scala      |   52 +
 .../ColumnGroupDataTypesTestCase.scala          |  148 +++
 .../ColumnPropertyValidationTestCase.scala      |   52 +
 .../HighCardinalityDataTypesTestCase.scala      |  253 ++++
 .../detailquery/IntegerDataTypeTestCase.scala   |   47 +
 .../NoDictionaryColumnTestCase.scala            |   74 ++
 .../SubqueryWithFilterAndSortTestCase.scala     |   81 ++
 .../ValueCompressionDataTypeTestCase.scala      |  142 +++
 .../DateDataTypeDirectDictionaryTest.scala      |  150 +++
 ...TypeDirectDictionaryWithNoDictTestCase.scala |   98 ++
 .../DateDataTypeNullDataTest.scala              |   83 ++
 ...estampDataTypeDirectDictionaryTestCase.scala |  152 +++
 ...TypeDirectDictionaryWithNoDictTestCase.scala |  100 ++
 .../TimestampDataTypeNullDataTest.scala         |   89 ++
 .../filterexpr/AllDataTypesTestCaseFilter.scala |   64 +
 .../filterexpr/CountStarTestCase.scala          |   69 ++
 .../filterexpr/FilterProcessorTestCase.scala    |  289 +++++
 .../GrtLtFilterProcessorTestCase.scala          |  192 +++
 .../filterexpr/IntegerDataTypeTestCase.scala    |   47 +
 .../NullMeasureValueTestCaseFilter.scala        |   58 +
 .../TestAndEqualFilterEmptyOperandValue.scala   |   84 ++
 .../filterexpr/TestGrtLessFilter.scala          |   92 ++
 .../joinquery/AllDataTypesTestCaseJoin.scala    |   69 ++
 .../joinquery/IntegerDataTypeTestCase.scala     |   47 +
 .../NullMeasureValueTestCaseAggregate.scala     |   84 ++
 .../TestNullValueSerialization.scala            |  108 ++
 .../sortexpr/AllDataTypesTestCaseSort.scala     |   52 +
 .../sortexpr/IntegerDataTypeTestCase.scala      |   48 +
 .../windowsexpr/WindowsExprTestCase.scala       |   71 ++
 .../spark/sql/common/util/CarbonFunSuite.scala  |   49 +
 .../apache/spark/sql/common/util/PlanTest.scala |   59 +
 .../spark/sql/common/util/QueryTest.scala       |  159 +++
 integration/spark-common/pom.xml                |   51 +-
 .../spark/sql/catalyst/CarbonDDLSqlParser.scala |    5 +-
 .../spark/sql/test/TestQueryExecutor.scala      |   59 +
 .../org/apache/spark/sql/CarbonContext.scala    |    6 +-
 .../execution/command/carbonTableSchema.scala   |   14 +-
 .../spark/sql/test/SparkTestQueryExecutor.scala |   55 +
 ...che.spark.sql.test.TestQueryExecutorRegister |    1 +
 .../spark/load/CarbonLoaderUtilTest.java        |  419 -------
 .../validation/FileFooterValidator.java         |  157 ---
 .../spark/src/test/resources/100_olap.csv       |   99 --
 .../spark/src/test/resources/10dim_4msr.csv     | 1000 ---------------
 .../spark/src/test/resources/IUD/T_Hive1.csv    |   10 -
 .../spark/src/test/resources/IUD/comp1.csv      |   11 -
 .../spark/src/test/resources/IUD/comp2.csv      |   11 -
 .../spark/src/test/resources/IUD/comp3.csv      |   11 -
 .../spark/src/test/resources/IUD/comp4.csv      |   11 -
 .../spark/src/test/resources/IUD/dest.csv       |    6 -
 .../spark/src/test/resources/IUD/other.csv      |    3 -
 .../spark/src/test/resources/IUD/sample.csv     |    4 -
 .../src/test/resources/IUD/sample_updated.csv   |    2 -
 .../spark/src/test/resources/IUD/source2.csv    |    3 -
 .../spark/src/test/resources/IUD/source3.csv    |    7 -
 .../spark/src/test/resources/IUD/update01.csv   |    6 -
 .../spark/src/test/resources/OLDFORMATTABLE.csv |   34 -
 .../src/test/resources/OLDFORMATTABLEHIVE.csv   |   33 -
 .../test/resources/Test_Data1_Logrithmic.csv    |    3 -
 .../src/test/resources/alldatatypescube.xml     |  109 --
 .../20160423/1400_1405/complex.dictionary       |   20 -
 .../sample/20160423/1400_1405/sample.dictionary |    9 -
 integration/spark/src/test/resources/array1.csv |    2 -
 .../src/test/resources/arrayColumnEmpty.csv     |   21 -
 .../spark/src/test/resources/avgTest.csv        |   16 -
 .../test/resources/badrecords/datasample.csv    |    7 -
 .../badrecords/emptyTimeStampValue.csv          |    8 -
 .../test/resources/badrecords/emptyValues.csv   |    8 -
 .../badrecords/insufficientColumns.csv          |    4 -
 .../resources/badrecords/seriazableValue.csv    |    3 -
 .../spark/src/test/resources/bigIntData.csv     |   14 -
 .../src/test/resources/bigIntDataWithHeader.csv |   13 -
 .../test/resources/bigIntDataWithoutHeader.csv  |   12 -
 .../src/test/resources/big_int_Decimal.csv      |    3 -
 .../spark/src/test/resources/channelsId.csv     |   10 -
 .../src/test/resources/character_carbon.csv     |   33 -
 .../spark/src/test/resources/character_hive.csv |   32 -
 .../test/resources/columndictionary/country.csv |    5 -
 .../test/resources/columndictionary/name.csv    |   10 -
 .../spark/src/test/resources/comment.csv        |    5 -
 .../test/resources/compaction/compaction1.csv   |    6 -
 .../compaction/compaction1_forhive.csv          |    5 -
 .../test/resources/compaction/compaction2.csv   |    6 -
 .../test/resources/compaction/compaction3.csv   |    6 -
 .../resources/compaction/compactioncard2.csv    |  257 ----
 .../compaction/compactioncard2_forhive.csv      |  256 ----
 .../src/test/resources/complexTypeDecimal.csv   |    9 -
 .../test/resources/complexTypeDecimalNested.csv |    9 -
 .../resources/complexTypeDecimalNestedHive.csv  |    8 -
 .../spark/src/test/resources/complexdata.csv    |  100 --
 .../spark/src/test/resources/complexdata1.csv   |   54 -
 .../spark/src/test/resources/complexdata2.csv   |   46 -
 .../src/test/resources/complexdatareordered.csv |   10 -
 .../test/resources/complexdatastructextra.csv   |   10 -
 .../complextypediffentcolheaderorder.csv        |  100 --
 .../src/test/resources/complextypesample.csv    |   50 -
 .../complextypespecialchardelimiter.csv         |   50 -
 integration/spark/src/test/resources/data.csv   |   11 -
 integration/spark/src/test/resources/data2.csv  |    4 -
 .../src/test/resources/data2_DiffTimeFormat.csv |    4 -
 .../spark/src/test/resources/dataDiff.csv       | 1001 ---------------
 .../spark/src/test/resources/dataIncrement.csv  |   21 -
 .../src/test/resources/dataWithEmptyRows.csv    |    2 -
 .../test/resources/dataWithNullFirstLine.csv    |   11 -
 .../src/test/resources/dataWithSingleQuote.csv  |    7 -
 .../src/test/resources/data_withCAPSHeader.csv  |    3 -
 .../src/test/resources/data_withMixedHeader.csv |    3 -
 .../spark/src/test/resources/datadelimiter.csv  |   11 -
 .../src/test/resources/datanullmeasurecol.csv   |    3 -
 .../spark/src/test/resources/dataretention1.csv |   11 -
 .../spark/src/test/resources/dataretention2.csv |   11 -
 .../spark/src/test/resources/dataretention3.csv |   11 -
 .../spark/src/test/resources/datasample.csv     |    4 -
 .../src/test/resources/datasamplecomplex.csv    |    2 -
 .../src/test/resources/datasamplefordate.csv    |    4 -
 .../spark/src/test/resources/datasamplenull.csv |    3 -
 .../spark/src/test/resources/datasingleCol.csv  |    3 -
 .../src/test/resources/datasingleComplexCol.csv |    3 -
 .../datawithNegeativewithoutHeader.csv          |    4 -
 .../test/resources/datawithNegtiveNumber.csv    |    5 -
 .../src/test/resources/datawithbackslash.csv    |   11 -
 .../src/test/resources/datawithblanklines.csv   |   19 -
 .../resources/datawithcomplexspecialchar.csv    |  151 ---
 .../test/resources/datawithescapecharacter.csv  |   22 -
 .../src/test/resources/datawithmaxinteger.csv   |   12 -
 .../test/resources/datawithmaxmininteger.csv    |   13 -
 .../src/test/resources/datawithmininteger.csv   |   12 -
 .../src/test/resources/datawithnullmeasure.csv  |    5 -
 .../src/test/resources/datawithnullmsrs.csv     |   11 -
 .../src/test/resources/datawithoutheader.csv    |   10 -
 .../test/resources/datawithspecialcharacter.csv |   38 -
 .../resources/decimalBoundaryDataCarbon.csv     |   12 -
 .../test/resources/decimalBoundaryDataHive.csv  |   11 -
 .../test/resources/decimalDataWithHeader.csv    |   13 -
 .../test/resources/decimalDataWithoutHeader.csv |   12 -
 .../src/test/resources/deviceInformationId.csv  |   48 -
 .../src/test/resources/deviceInformationId2.csv |    6 -
 integration/spark/src/test/resources/dict.txt   |    1 -
 .../spark/src/test/resources/dimSample.csv      |   21 -
 .../spark/src/test/resources/dimTableSample.csv |    4 -
 .../src/test/resources/emptyDimensionData.csv   |   21 -
 .../test/resources/emptyDimensionDataHive.csv   |   20 -
 .../resources/emptyrow/csvwithonlyspacechar.csv |    1 -
 .../src/test/resources/emptyrow/emptyRows.csv   |    7 -
 .../spark/src/test/resources/example-data.csv   | 1001 ---------------
 .../src/test/resources/filter/datagrtlrt.csv    |    3 -
 .../spark/src/test/resources/filter/emp2.csv    |    9 -
 .../src/test/resources/filter/emp2allnull.csv   |    9 -
 .../src/test/resources/filter/emp2nonull.csv    |    8 -
 .../src/test/resources/integertypecube.xml      |   55 -
 .../src/test/resources/invalidMeasures.csv      |    3 -
 .../spark/src/test/resources/join/emp.csv       |    1 -
 .../spark/src/test/resources/join/employee.csv  | 1000 ---------------
 .../spark/src/test/resources/join/mgr.csv       |    1 -
 .../spark/src/test/resources/join/mobile.csv    | 1000 ---------------
 .../test/resources/lessthandatacolumndata.csv   |   11 -
 .../resources/loadMultiFiles/.invisibilityfile  |    3 -
 .../src/test/resources/loadMultiFiles/_SUCCESS  |    0
 .../src/test/resources/loadMultiFiles/data.csv  |    4 -
 .../test/resources/loadMultiFiles/emptyfile.csv |    0
 .../loadMultiFiles/nestedfolder1/data.csv       |    2 -
 .../loadMultiFiles/nestedfolder1/data1.csv      |    3 -
 .../nestedfolder1/nestedfolder2/data.csv        |    3 -
 .../src/test/resources/loadMultiFiles/non-csv   |    3 -
 .../resources/locationInfoActiveCountry.csv     |    9 -
 integration/spark/src/test/resources/mac.csv    |   39 -
 .../spark/src/test/resources/mobileimei.csv     |   24 -
 .../spark/src/test/resources/noneCsvFormat.cs   |    4 -
 .../spark/src/test/resources/nullSample.csv     |    0
 .../test/resources/nullandnonparsableValue.csv  |    3 -
 .../src/test/resources/nullmeasurevalue.csv     |   11 -
 .../test/resources/nullvalueserialization.csv   |    2 -
 .../src/test/resources/numerictypecube.xml      |   41 -
 integration/spark/src/test/resources/sample     |    5 -
 integration/spark/src/test/resources/sample.csv |    5 -
 .../spark/src/test/resources/sample.csv.bz2     |  Bin 114 -> 0 bytes
 .../spark/src/test/resources/sample.csv.gz      |  Bin 106 -> 0 bytes
 .../spark/src/test/resources/sampleComplex.csv  |    4 -
 .../test/resources/sample_withDelimiter017.csv  |    5 -
 .../spark/src/test/resources/shortolap.csv      |    5 -
 .../spark/src/test/resources/stringtypecube.xml |   55 -
 .../spark/src/test/resources/struct_all.csv     |    4 -
 .../src/test/resources/structusingstruct.csv    |    2 -
 integration/spark/src/test/resources/test.json  |    3 -
 .../src/test/resources/timeStampFormatData1.csv |   12 -
 .../src/test/resources/timeStampFormatData2.csv |   11 -
 .../spark/src/test/resources/timestampdata.csv  |    2 -
 .../src/test/resources/timestamptypecube.xml    |   50 -
 .../test/resources/verticalDelimitedData.csv    |   12 -
 .../spark/src/test/resources/windows.csv        | 1000 ---------------
 .../aggquery/IntegerDataTypeTestCase.scala      |   49 -
 .../spark/testsuite/bigdecimal/TestBigInt.scala |   93 --
 .../TestDimensionWithDecimalDataType.scala      |   61 -
 ...plexPrimitiveTimestampDirectDictionary.scala |   17 +-
 .../complexType/TestComplexTypeQuery.scala      |   25 +-
 .../complexType/TestCreateTableWithDouble.scala |   98 --
 .../MultiFilesDataLoagdingTestCase.scala        |   59 -
 .../dataload/SparkDatasourceSuite.scala         |   55 +-
 .../dataload/TestLoadDataGeneral.scala          |  145 ---
 .../TestLoadDataWithAutoLoadMerge.scala         |   65 -
 .../dataload/TestLoadDataWithBlankLine.scala    |   66 -
 .../TestLoadDataWithEmptyArrayColumns.scala     |   66 -
 .../dataload/TestLoadDataWithJunkChars.scala    |   61 -
 .../TestLoadDataWithMaxMinInteger.scala         |   98 --
 .../dataload/TestLoadDataWithNullMeasures.scala |   53 -
 .../TestLoadDataWithSingleQuotechar.scala       |    7 +-
 .../emptyrow/TestCSVHavingOnlySpaceChar.scala   |   67 -
 .../testsuite/emptyrow/TestEmptyRows.scala      |   82 --
 .../aggquery/AggregateQueryTestCase.scala       |   45 -
 .../AllDataTypesTestCaseAggregate.scala         |  111 --
 .../aggquery/AverageQueryTestCase.scala         |  112 --
 .../AllDataTypesTestCaseAggregate.scala         | 1107 +----------------
 .../InsertIntoCarbonTableTestCase.scala         |   20 +-
 .../TestQueryWithOldCarbonDataFile.scala        |   70 --
 .../allqueries/TestQueryWithoutDataLoad.scala   |   63 -
 .../allqueries/TestTableNameHasDbName.scala     |   54 -
 .../badrecordloger/BadRecordLoggerTest.scala    |  266 ----
 .../testsuite/bigdecimal/TestAvgForBigInt.scala |   59 -
 .../testsuite/bigdecimal/TestBigDecimal.scala   |  204 ---
 .../bigdecimal/TestNullAndEmptyFields.scala     |  124 --
 .../blockprune/BlockPruneQueryTestCase.scala    |  115 --
 .../createtable/TestCreateTableSyntax.scala     |   39 +-
 .../CompactionSystemLockFeatureTest.scala       |   19 +-
 .../DataCompactionBlockletBoundryTest.scala     |  110 --
 .../DataCompactionBoundaryConditionsTest.scala  |  104 --
 .../DataCompactionCardinalityBoundryTest.scala  |  128 --
 .../datacompaction/DataCompactionLockTest.scala |  130 --
 .../DataCompactionMinorThresholdTest.scala      |   23 +-
 .../DataCompactionNoDictionaryTest.scala        |   18 +-
 .../datacompaction/DataCompactionTest.scala     |   16 +-
 .../MajorCompactionIgnoreInMinorTest.scala      |  170 ---
 .../MajorCompactionStopsAfterCompaction.scala   |  147 ---
 .../TestDataLoadPartitionCoalescer.scala        |  168 ---
 .../TestDataLoadWithColumnsMoreThanSchema.scala |  139 ---
 .../TestDataWithDicExcludeAndInclude.scala      |  106 --
 .../testsuite/dataload/TestLoadDataFrame.scala  |   95 --
 .../dataload/TestLoadDataUseAllDictionary.scala |   56 -
 .../TestLoadDataWithDiffTimestampFormat.scala   |  138 ---
 .../TestLoadDataWithFileHeaderException.scala   |   67 -
 .../dataload/TestLoadDataWithHiveSyntax.scala   |  695 -----------
 ...ataWithMalformedCarbonCommandException.scala |  163 ---
 .../dataload/TestLoadDataWithNoMeasure.scala    |  128 --
 .../TestLoadDataWithNotProperInputFile.scala    |   77 --
 .../dataload/TestLoadDataWithSinglePass.scala   |   23 +-
 .../dataload/TestLoadTblNameIsKeyword.scala     |   96 --
 .../TestNoInvertedIndexLoadAndQuery.scala       |   17 +-
 .../dataload/TestTableLevelBlockSize.scala      |  128 --
 .../DataRetentionConcurrencyTestCase.scala      |  113 --
 .../dataretention/DataRetentionTestCase.scala   |  334 -----
 .../deleteTable/TestDeleteTableNewDDL.scala     |  254 ----
 .../describeTable/TestDescribeTable.scala       |   56 -
 .../detailquery/AllDataTypesTestCase.scala      |   54 -
 .../ColumnGroupDataTypesTestCase.scala          |  149 ---
 .../ColumnPropertyValidationTestCase.scala      |   51 -
 .../HighCardinalityDataTypesTestCase.scala      |  250 ----
 .../detailquery/IntegerDataTypeTestCase.scala   |   48 -
 .../NoDictionaryColumnTestCase.scala            |   76 --
 .../SubqueryWithFilterAndSortTestCase.scala     |   82 --
 .../ValueCompressionDataTypeTestCase.scala      |  145 ---
 .../DateDataTypeDirectDictionaryTest.scala      |  154 ---
 ...TypeDirectDictionaryWithNoDictTestCase.scala |  101 --
 .../DateDataTypeNullDataTest.scala              |   88 --
 ...estampDataTypeDirectDictionaryTestCase.scala |  157 ---
 ...TypeDirectDictionaryWithNoDictTestCase.scala |  107 --
 .../TimestampDataTypeNullDataTest.scala         |   92 --
 .../filterexpr/AllDataTypesTestCaseFilter.scala |   66 -
 .../filterexpr/CountStarTestCase.scala          |   73 --
 .../filterexpr/FilterProcessorTestCase.scala    |  289 -----
 .../GrtLtFilterProcessorTestCase.scala          |  137 +-
 .../filterexpr/IntegerDataTypeTestCase.scala    |   48 -
 .../NullMeasureValueTestCaseFilter.scala        |   58 -
 .../TestAndEqualFilterEmptyOperandValue.scala   |   89 --
 .../filterexpr/TestGrtLessFilter.scala          |   96 --
 .../HadoopFSRelationTestCase.scala              |   13 +-
 .../iud/DeleteCarbonTableTestCase.scala         |   15 +-
 .../testsuite/iud/IUDCompactionTestCases.scala  |   21 +-
 .../iud/UpdateCarbonTableTestCase.scala         |   58 +-
 .../joinquery/AllDataTypesTestCaseJoin.scala    |   68 -
 .../joinquery/IntegerDataTypeTestCase.scala     |   48 -
 .../NullMeasureValueTestCaseAggregate.scala     |   85 --
 .../TestNullValueSerialization.scala            |  112 --
 .../sortexpr/AllDataTypesTestCaseSort.scala     |   54 -
 .../sortexpr/IntegerDataTypeTestCase.scala      |   49 -
 .../windowsexpr/WindowsExprTestCase.scala       |   78 --
 .../spark/util/AllDictionaryTestCase.scala      |   16 +-
 .../AutoHighCardinalityIdentifyTestCase.scala   |   15 +-
 .../spark/util/DictionaryTestCaseUtil.scala     |    5 +-
 .../util/ExternalColumnDictionaryTestCase.scala |   45 +-
 ...GlobalDictionaryUtilConcurrentTestCase.scala |   21 +-
 .../util/GlobalDictionaryUtilTestCase.scala     |   37 +-
 .../apache/spark/sql/TestCarbonSqlParser.scala  |    1 +
 .../sql/common/util/CarbonHiveContext.scala     |   53 -
 .../spark/sql/common/util/QueryTest.scala       |   11 +-
 .../sql/execution/command/DDLStrategy.scala     |    9 +-
 .../execution/command/carbonTableSchema.scala   |   19 +-
 .../apache/spark/sql/hive/CarbonMetastore.scala |    2 +-
 .../sql/parser/CarbonSpark2SqlParser.scala      |    3 +-
 .../spark/sql/parser/CarbonSparkSqlParser.scala |   17 +-
 .../sql/test/Spark2TestQueryExecutor.scala      |   61 +
 ....apache.spark.sql.sources.DataSourceRegister |    1 +
 ...che.spark.sql.test.TestQueryExecutorRegister |    1 +
 integration/spark2/src/test/resources/data.csv  |   11 -
 .../spark2/src/test/resources/dataDiff.csv      | 1001 ---------------
 .../spark2/src/test/resources/data_alltypes.csv |   10 -
 .../AllDataTypesTestCaseAggregate.scala         | 1166 ------------------
 .../InsertIntoCarbonTableTestCase.scala         |  162 ---
 .../carbondata/CarbonDataSourceSuite.scala      |   19 +-
 .../bucketing/TableBucketingTestCase.scala      |   75 +-
 .../spark/carbondata/util/QueryTest.scala       |    0
 .../vectorreader/VectorReaderTestCase.scala     |   17 +-
 .../spark/sql/common/util/QueryTest.scala       |   50 +-
 .../apache/spark/util/CarbonCommandSuite.scala  |    5 +-
 pom.xml                                         |    1 +
 514 files changed, 20039 insertions(+), 21976 deletions(-)
----------------------------------------------------------------------
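
The core of the reuse is the TestQueryExecutor indirection visible in the file
list above (TestQueryExecutor.scala, SparkTestQueryExecutor.scala,
Spark2TestQueryExecutor.scala, plus the two TestQueryExecutorRegister service
registration files): the shared suites in spark-common-test compile against a
version-neutral interface, and each Spark integration module registers its own
implementation through the JDK service loader. A minimal Scala sketch of that
pattern (the trait shape and member names here are illustrative assumptions,
not the actual CarbonData signatures):

    import java.util.ServiceLoader

    // Version-neutral contract the shared tests compile against
    // (hypothetical shape; the real trait lives in TestQueryExecutor.scala).
    trait TestQueryExecutorRegister {
      def sql(sqlText: String): AnyRef // a DataFrame in the real modules
    }

    object TestQueryExecutor {
      // Each Spark module ships a META-INF/services entry (the
      // TestQueryExecutorRegister resource files in the diffstat), so the
      // matching implementation is discovered at runtime.
      lazy val INSTANCE: TestQueryExecutorRegister =
        ServiceLoader.load(classOf[TestQueryExecutorRegister]).iterator().next()
    }

With this in place the same QueryTest suites can run unchanged against Spark
1.x and 2.x; only the registered executor differs per module.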


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/core/src/main/java/org/apache/carbondata/scan/expression/ExpressionResult.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/scan/expression/ExpressionResult.java b/core/src/main/java/org/apache/carbondata/scan/expression/ExpressionResult.java
index 407a1d7..408fb1d 100644
--- a/core/src/main/java/org/apache/carbondata/scan/expression/ExpressionResult.java
+++ b/core/src/main/java/org/apache/carbondata/scan/expression/ExpressionResult.java
@@ -85,6 +85,9 @@ public class ExpressionResult implements Comparable<ExpressionResult> {
           if (value instanceof Double) {
             return ((Double) value).intValue();
           }
+          if (value instanceof Long) {
+            return ((Long) value).intValue();
+          }
           return (Integer) value;
         case DATE:
           if (value instanceof java.sql.Date) {
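
For context: the hunk above makes ExpressionResult.getInt() narrow a boxed
Long the same way it already narrows a boxed Double, instead of falling
through to the final (Integer) cast and failing with a ClassCastException. A
tiny Scala illustration of the narrowing (illustrative values only, not
CarbonData API):

    val value: Any = 42L
    val asInt: Int = value match {
      case d: java.lang.Double  => d.intValue   // previously handled
      case l: java.lang.Long    => l.intValue   // the case this patch adds
      case i: java.lang.Integer => i.intValue
      case other => throw new ClassCastException(s"cannot narrow $other")
    }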

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/pom.xml
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/pom.xml b/integration/spark-common-test/pom.xml
new file mode 100644
index 0000000..91b27dc
--- /dev/null
+++ b/integration/spark-common-test/pom.xml
@@ -0,0 +1,232 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+    Licensed to the Apache Software Foundation (ASF) under one or more
+    contributor license agreements.  See the NOTICE file distributed with
+    this work for additional information regarding copyright ownership.
+    The ASF licenses this file to You under the Apache License, Version 2.0
+    (the "License"); you may not use this file except in compliance with
+    the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+  <modelVersion>4.0.0</modelVersion>
+
+  <parent>
+    <groupId>org.apache.carbondata</groupId>
+    <artifactId>carbondata-parent</artifactId>
+    <version>1.0.0-incubating-SNAPSHOT</version>
+    <relativePath>../../pom.xml</relativePath>
+  </parent>
+
+  <artifactId>carbondata-spark-common-test</artifactId>
+  <name>Apache CarbonData :: Spark Common Test</name>
+
+  <properties>
+    <dev.path>${basedir}/../../dev</dev.path>
+  </properties>
+
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.carbondata</groupId>
+      <artifactId>carbondata-spark-common</artifactId>
+      <version>${project.version}</version>
+      <scope>test</scope>
+      <exclusions>
+        <exclusion>
+          <groupId>org.apache.spark</groupId>
+          <artifactId>spark-hive-thriftserver_2.10</artifactId>
+        </exclusion>
+      </exclusions>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>junit</groupId>
+      <artifactId>junit</artifactId>
+    </dependency>
+    <dependency>
+      <groupId>org.scalatest</groupId>
+      <artifactId>scalatest_${scala.binary.version}</artifactId>
+      <version>2.2.1</version>
+      <scope>test</scope>
+    </dependency>
+  </dependencies>
+
+  <build>
+    <testSourceDirectory>src/test/scala</testSourceDirectory>
+    <resources>
+      <resource>
+        <directory>src/resources</directory>
+      </resource>
+      <resource>
+        <directory>.</directory>
+        <includes>
+          <include>CARBON_SPARK_INTERFACELogResource.properties</include>
+        </includes>
+      </resource>
+    </resources>
+    <plugins>
+      <plugin>
+        <groupId>org.scala-tools</groupId>
+        <artifactId>maven-scala-plugin</artifactId>
+        <version>2.15.2</version>
+        <executions>
+          <execution>
+            <id>compile</id>
+            <goals>
+              <goal>compile</goal>
+            </goals>
+            <phase>compile</phase>
+          </execution>
+          <execution>
+            <id>testCompile</id>
+            <goals>
+              <goal>testCompile</goal>
+            </goals>
+            <phase>test</phase>
+          </execution>
+          <execution>
+            <phase>process-resources</phase>
+            <goals>
+              <goal>compile</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <configuration>
+          <source>1.7</source>
+          <target>1.7</target>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-surefire-plugin</artifactId>
+        <version>2.18</version>
+        <!-- Note config is repeated in scalatest config -->
+        <configuration>
+          <includes>
+            <include>**/Test*.java</include>
+            <include>**/*Test.java</include>
+            <include>**/*TestCase.java</include>
+            <include>**/*Suite.java</include>
+          </includes>
+          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
+          <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
+          <systemProperties>
+            <java.awt.headless>true</java.awt.headless>
+          </systemProperties>
+          <failIfNoTests>false</failIfNoTests>
+        </configuration>
+      </plugin>
+      <plugin>
+        <groupId>org.scalatest</groupId>
+        <artifactId>scalatest-maven-plugin</artifactId>
+        <version>1.0</version>
+        <!-- Note config is repeated in surefire config -->
+        <configuration>
+          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
+          <junitxml>.</junitxml>
+          <filereports>CarbonTestSuite.txt</filereports>
+          <argLine>-ea -Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m 
+          </argLine>
+          <stderr />
+          <environmentVariables>
+          </environmentVariables>
+          <systemProperties>
+            <java.awt.headless>true</java.awt.headless>
+            <use.kettle>${use.kettle}</use.kettle>
+          </systemProperties>
+        </configuration>
+        <executions>
+          <execution>
+            <id>test</id>
+            <goals>
+              <goal>test</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+    </plugins>
+  </build>
+  <profiles>
+    <profile>
+      <id>spark-1.5</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.carbondata</groupId>
+          <artifactId>carbondata-spark</artifactId>
+          <version>${project.version}</version>
+          <scope>test</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.spark</groupId>
+              <artifactId>spark-hive-thriftserver_2.10</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.spark</groupId>
+              <artifactId>spark-repl_2.10</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>spark-1.6</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.carbondata</groupId>
+          <artifactId>carbondata-spark</artifactId>
+          <version>${project.version}</version>
+          <scope>test</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.spark</groupId>
+              <artifactId>spark-hive-thriftserver_2.10</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.spark</groupId>
+              <artifactId>spark-repl_2.10</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+      </dependencies>
+    </profile>
+    <profile>
+      <id>spark-2.1</id>
+      <dependencies>
+        <dependency>
+          <groupId>org.apache.carbondata</groupId>
+          <artifactId>carbondata-spark2</artifactId>
+          <version>${project.version}</version>
+          <scope>test</scope>
+          <exclusions>
+            <exclusion>
+              <groupId>org.apache.spark</groupId>
+              <artifactId>spark-hive-thriftserver_2.10</artifactId>
+            </exclusion>
+            <exclusion>
+              <groupId>org.apache.spark</groupId>
+              <artifactId>spark-repl_2.10</artifactId>
+            </exclusion>
+          </exclusions>
+        </dependency>
+      </dependencies>
+    </profile>
+  </profiles>
+</project>
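
The three profiles at the end bind this shared test module to a concrete
integration artifact: spark-1.5 (active by default) and spark-1.6 pull in
carbondata-spark, while spark-2.1 pulls in carbondata-spark2, all in test
scope and all excluding the Scala-2.10 thriftserver/repl artifacts. Selecting
one is ordinary Maven profile activation, e.g. mvn -Pspark-2.1 test (an
illustrative invocation; the exact goals and flags may differ in the
project's build docs).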

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/java/org/carbondata/integration/spark/load/CarbonLoaderUtilTest.java
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/java/org/carbondata/integration/spark/load/CarbonLoaderUtilTest.java b/integration/spark-common-test/src/test/java/org/carbondata/integration/spark/load/CarbonLoaderUtilTest.java
new file mode 100644
index 0000000..ed4f95b
--- /dev/null
+++ b/integration/spark-common-test/src/test/java/org/carbondata/integration/spark/load/CarbonLoaderUtilTest.java
@@ -0,0 +1,419 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.integration.spark.load;
+
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.LinkedHashMap;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.carbondata.core.carbon.datastore.block.Distributable;
+import org.apache.carbondata.core.carbon.datastore.block.TableBlockInfo;
+import org.apache.carbondata.spark.load.CarbonLoaderUtil;
+import org.junit.Assert;
+import org.junit.Test;
+
+/**
+ * Test class to test block distribution functionality
+ */
+public class CarbonLoaderUtilTest {
+  List<Distributable> blockInfos = null;
+  int noOfNodesInput = -1;
+  List<String> activeNode = null;
+  Map<String, List<Distributable>> expected = null;
+  Map<String, List<Distributable>> mapOfNodes = null;
+
+  @Test public void nodeBlockMapping() throws Exception {
+
+    // scenario with 3 nodes and 3 executors
+    initSet1();
+    Map<String, List<Distributable>> mapOfNodes =
+            CarbonLoaderUtil.nodeBlockMapping(blockInfos, noOfNodesInput, activeNode);
+    // node allocation
+    Assert.assertTrue("Node Allocation", expected.size() == mapOfNodes.size());
+    // block allocation
+    boolean isEqual = compareResult(expected, mapOfNodes);
+    Assert.assertTrue("Block Allocation", isEqual);
+
+    // 2 nodes and 3 executors
+    initSet2();
+    mapOfNodes = CarbonLoaderUtil.nodeBlockMapping(blockInfos, noOfNodesInput, activeNode);
+    // node allocation
+    Assert.assertTrue("Node Allocation", expected.size() == mapOfNodes.size());
+    // block allocation
+    isEqual = compareResult(expected, mapOfNodes);
+    Assert.assertTrue("Block Allocation", isEqual);
+
+    // 3 data nodes and 2 executors
+    initSet3();
+    mapOfNodes = CarbonLoaderUtil.nodeBlockMapping(blockInfos, noOfNodesInput, activeNode);
+    // node allocation
+    Assert.assertTrue("Node Allocation", expected.size() == mapOfNodes.size());
+    // block allocation
+    isEqual = compareResult(expected, mapOfNodes);
+    Assert.assertTrue("Block Allocation", isEqual);
+  }
+
+  /**
+   * Compares the block allocation of two node-to-blocks maps: both must have
+   * the same number of nodes and, after sorting by list size, matching sizes.
+   *
+   * @param expectedResult expected node -> blocks assignment
+   * @param actualResult   actual node -> blocks assignment
+   * @return true if the allocations match in shape
+   */
+  private boolean compareResult(Map<String, List<Distributable>> expectedResult,
+                                Map<String, List<Distributable>> actualResult) {
+    expectedResult = sortByListSize(expectedResult);
+    actualResult = sortByListSize(actualResult);
+    List<Map.Entry<String, List<Distributable>>> expectedList = new LinkedList<>(expectedResult.entrySet());
+    List<Map.Entry<String, List<Distributable>>> mapOfNodesList = new LinkedList<>(actualResult.entrySet());
+    boolean isEqual = expectedList.size() == mapOfNodesList.size();
+    if (isEqual) {
+      for (int i = 0; i < expectedList.size(); i++) {
+        int size1 = expectedList.get(i).getValue().size();
+        int size2 = mapOfNodesList.get(i).getValue().size();
+        isEqual = size1 == size2;
+        if (!isEqual) {
+          break;
+        }
+      }
+    }
+    return isEqual;
+  }
+
+  /**
+   * Sorts the map entries by the size of their block lists, largest first.
+   *
+   * @param map node -> blocks assignment
+   * @return a LinkedHashMap ordered by descending list size
+   */
+  private static Map<String, List<Distributable>> sortByListSize(
+          Map<String, List<Distributable>> map) {
+    List<List<Distributable>> list = new LinkedList(map.entrySet());
+    Collections.sort(list, new Comparator() {
+      public int compare(Object obj1, Object obj2) {
+        if (obj1 == null && obj2 == null) {
+          return 0;
+        } else if (obj1 == null) {
+          return 1;
+        } else if (obj2 == null) {
+          return -1;
+        }
+        int size1 = ((List) ((Map.Entry) (obj1)).getValue()).size();
+        int size2 = ((List) ((Map.Entry) (obj2)).getValue()).size();
+        return size2 - size1;
+      }
+    });
+
+    Map res = new LinkedHashMap();
+    for (Iterator it = list.iterator(); it.hasNext(); ) {
+      Map.Entry entry = (Map.Entry) it.next();
+      res.put(entry.getKey(), entry.getValue());
+    }
+    return res;
+  }
+
+  void initSet1() {
+    blockInfos = new ArrayList<>();
+    activeNode = new ArrayList<>();
+    activeNode.add("node-7");
+    activeNode.add("node-9");
+    activeNode.add("node-11");
+    String[] location = { "node-7", "node-9", "node-11" };
+    blockInfos.add(new TableBlockInfo("node", 1, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 2, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 3, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 4, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 5, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 6, "1", location, 0));
+    expected = new HashMap<>();
+    expected.put("node-7", blockInfos.subList(0, 2));
+    expected.put("node-9", blockInfos.subList(2, 4));
+    expected.put("node-11", blockInfos.subList(4, 6));
+  }
+
+  void initSet2() {
+    blockInfos = new ArrayList<>();
+    activeNode = new ArrayList<>();
+    activeNode.add("node-7");
+    activeNode.add("node-9");
+    activeNode.add("node-11");
+    String[] location = { "node-7", "node-11" };
+    blockInfos.add(new TableBlockInfo("node", 1, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 2, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 3, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 4, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 5, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 6, "1", location, 0));
+    expected = new HashMap<>();
+    expected.put("node-7", blockInfos.subList(0, 2));
+    expected.put("node-9", blockInfos.subList(2, 4));
+    expected.put("node-11", blockInfos.subList(4, 6));
+  }
+
+  void initSet3() {
+    blockInfos = new ArrayList<>();
+    activeNode = new ArrayList<>();
+    activeNode.add("node-7");
+    activeNode.add("node-11");
+    String[] location = { "node-7", "node-9", "node-11" };
+    blockInfos.add(new TableBlockInfo("node", 1, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 2, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 3, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 4, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 5, "1", location, 0));
+    blockInfos.add(new TableBlockInfo("node", 6, "1", location, 0));
+    expected = new HashMap<>();
+    expected.put("node-7", blockInfos.subList(0, 3));
+    expected.put("node-11", blockInfos.subList(3, 6));
+  }
+
+
+  /**
+   * Test case with 4 blocks and 4 nodes with replication factor 3.
+   *
+   * @throws Exception
+   */
+  @Test public void nodeBlockMappingTestWith4Blocks4Nodes() throws Exception {
+
+    Map<TableBlockInfo, List<String>> inputMap = new HashMap<TableBlockInfo, List<String>>(5);
+
+    TableBlockInfo block1 =
+        new TableBlockInfo("path1", 123, "1", new String[] { "1", "2", "3" }, 111);
+    TableBlockInfo block2 =
+        new TableBlockInfo("path2", 123, "2", new String[] { "2", "3", "4" }, 111);
+    TableBlockInfo block3 =
+        new TableBlockInfo("path3", 123, "3", new String[] { "3", "4", "1" }, 111);
+    TableBlockInfo block4 =
+        new TableBlockInfo("path4", 123, "4", new String[] { "1", "2", "4" }, 111);
+
+    inputMap.put(block1, Arrays.asList(new String[]{"1","2","3"}));
+    inputMap.put(block2, Arrays.asList(new String[]{"2","3","4"}));
+    inputMap.put(block3, Arrays.asList(new String[]{"3","4","1"}));
+    inputMap.put(block4, Arrays.asList(new String[]{"1","2","4"}));
+
+    List<TableBlockInfo> inputBlocks = new ArrayList(6);
+    inputBlocks.add(block1);
+    inputBlocks.add(block2);
+    inputBlocks.add(block3);
+    inputBlocks.add(block4);
+
+    Map<String, List<TableBlockInfo>> outputMap
+        = CarbonLoaderUtil.nodeBlockMapping(inputBlocks, 4);
+
+    Assert.assertTrue(calculateBlockDistribution(inputMap, outputMap, 4, 4));
+
+    Assert.assertTrue(calculateBlockLocality(inputMap, outputMap, 4, 4));
+  }
+
+  private boolean calculateBlockLocality(Map<TableBlockInfo, List<String>> inputMap,
+      Map<String, List<TableBlockInfo>> outputMap, int numberOfBlocks, int numberOfNodes) {
+
+    double notInNodeLocality = 0;
+    for (Map.Entry<String, List<TableBlockInfo>> entry : outputMap.entrySet()) {
+
+      List<TableBlockInfo> blockListOfANode = entry.getValue();
+
+      for (TableBlockInfo eachBlock : blockListOfANode) {
+
+        // for each block check the node locality
+
+        List<String> blockLocality = inputMap.get(eachBlock);
+        if (!blockLocality.contains(entry.getKey())) {
+          notInNodeLocality++;
+        }
+      }
+    }
+
+    System.out.println(
+        ((notInNodeLocality / numberOfBlocks) * 100) + " " + "is the node locality mismatch");
+    if ((notInNodeLocality / numberOfBlocks) * 100 > 30) {
+      return false;
+    }
+    return true;
+  }
+
+  private boolean calculateBlockDistribution(Map<TableBlockInfo, List<String>> inputMap,
+      Map<String, List<TableBlockInfo>> outputMap, int numberOfBlocks, int numberOfNodes) {
+
+    int blocksPerNode = numberOfBlocks / numberOfNodes;
+
+    for (Map.Entry<String, List<TableBlockInfo>> entry : outputMap.entrySet()) {
+
+      if (entry.getValue().size() < blocksPerNode) {
+        return false;
+      }
+    }
+    return true;
+  }
+
+  /**
+   * Test case with 5 blocks and 3 nodes
+   *
+   * @throws Exception
+   */
+  @Test public void nodeBlockMappingTestWith5blocks3nodes() throws Exception {
+
+    Map<TableBlockInfo, List<String>> inputMap = new HashMap<TableBlockInfo, List<String>>(5);
+
+    TableBlockInfo block1 =
+        new TableBlockInfo("part-0-0-1462341987000", 123, "1", new String[] { "1", "2", "3" }, 111);
+    TableBlockInfo block2 =
+        new TableBlockInfo("part-1-0-1462341987000", 123, "2", new String[] { "1", "2", "3" }, 111);
+    TableBlockInfo block3 =
+        new TableBlockInfo("part-2-0-1462341987000", 123, "3", new String[] { "1", "2", "3" }, 111);
+    TableBlockInfo block4 =
+        new TableBlockInfo("part-3-0-1462341987000", 123, "4", new String[] { "1", "2", "3" }, 111);
+    TableBlockInfo block5 =
+        new TableBlockInfo("part-4-0-1462341987000", 123, "5", new String[] { "1", "2", "3" }, 111);
+
+    inputMap.put(block1, Arrays.asList(new String[]{"1","2","3"}));
+    inputMap.put(block2, Arrays.asList(new String[]{"1","2","3"}));
+    inputMap.put(block3, Arrays.asList(new String[]{"1","2","3"}));
+    inputMap.put(block4, Arrays.asList(new String[]{"1","2","3"}));
+    inputMap.put(block5, Arrays.asList(new String[]{"1","2","3"}));
+
+    List<TableBlockInfo> inputBlocks = new ArrayList(6);
+    inputBlocks.add(block1);
+    inputBlocks.add(block2);
+    inputBlocks.add(block3);
+    inputBlocks.add(block4);
+    inputBlocks.add(block5);
+
+    Map<String, List<TableBlockInfo>> outputMap = CarbonLoaderUtil.nodeBlockMapping(inputBlocks, 3);
+
+    Assert.assertTrue(calculateBlockDistribution(inputMap, outputMap, 5, 3));
+
+    Assert.assertTrue(calculateBlockLocality(inputMap, outputMap, 5, 3));
+
+  }
+
+  /**
+   * Test case with 6 blocks and 4 nodes where the 4th node doesn't have any local data.
+   *
+   * @throws Exception
+   */
+  @Test public void nodeBlockMappingTestWith6Blocks4nodes() throws Exception {
+
+    Map<TableBlockInfo, List<String>> inputMap = new HashMap<TableBlockInfo, List<String>>(5);
+
+    TableBlockInfo block1 =
+        new TableBlockInfo("part-0-0-1462341987000", 123, "1", new String[] { "1", "2", "3" }, 111);
+    TableBlockInfo block2 =
+        new TableBlockInfo("part-1-0-1462341987000", 123, "2", new String[] { "1", "2", "3" }, 111);
+    TableBlockInfo block3 =
+        new TableBlockInfo("part-2-0-1462341987000", 123, "3", new String[] { "1", "2", "3" }, 111);
+    TableBlockInfo block4 =
+        new TableBlockInfo("part-3-0-1462341987000", 123, "4", new String[] { "1", "2", "3" }, 111);
+    TableBlockInfo block5 =
+        new TableBlockInfo("part-4-0-1462341987000", 123, "5", new String[] { "1", "2", "3" }, 111);
+    TableBlockInfo block6 =
+        new TableBlockInfo("part-5-0-1462341987000", 123, "6", new String[] { "1", "2", "3" }, 111);
+
+    inputMap.put(block1, Arrays.asList(new String[]{"1","2","3"}));
+    inputMap.put(block2, Arrays.asList(new String[]{"1","2","3"}));
+    inputMap.put(block3, Arrays.asList(new String[]{"1","2","3"}));
+    inputMap.put(block4, Arrays.asList(new String[]{"1","2","3"}));
+    inputMap.put(block5, Arrays.asList(new String[]{"1","2","3"}));
+    inputMap.put(block6, Arrays.asList(new String[]{"1","2","3"}));
+
+
+    List<TableBlockInfo> inputBlocks = new ArrayList(6);
+    inputBlocks.add(block1);
+    inputBlocks.add(block2);
+    inputBlocks.add(block3);
+    inputBlocks.add(block4);
+    inputBlocks.add(block5);
+    inputBlocks.add(block6);
+
+    Map<String, List<TableBlockInfo>> outputMap = CarbonLoaderUtil.nodeBlockMapping(inputBlocks, 4);
+
+    Assert.assertTrue(calculateBlockDistribution(inputMap, outputMap, 6, 4));
+
+    Assert.assertTrue(calculateBlockLocality(inputMap, outputMap, 6, 4));
+
+  }
+
+  /**
+   * Test case with 10 blocks and 4 nodes with a 10%/60%/30% locality distribution
+   *
+   * @throws Exception
+   */
+  @Test public void nodeBlockMappingTestWith10Blocks4nodes() throws Exception {
+
+    Map<TableBlockInfo, List<String>> inputMap = new HashMap<TableBlockInfo, List<String>>(5);
+
+    TableBlockInfo block1 =
+        new TableBlockInfo("part-1-0-1462341987000", 123, "1", new String[] { "2", "4" }, 111);
+    TableBlockInfo block2 =
+        new TableBlockInfo("part-2-0-1462341987000", 123, "2", new String[] { "2", "4" }, 111);
+    TableBlockInfo block3 =
+        new TableBlockInfo("part-3-0-1462341987000", 123, "3", new String[] { "2", "4" }, 111);
+    TableBlockInfo block4 =
+        new TableBlockInfo("part-4-0-1462341987000", 123, "4", new String[] { "2", "4" }, 111);
+    TableBlockInfo block5 =
+        new TableBlockInfo("part-5-0-1462341987000", 123, "5", new String[] { "2", "4" }, 111);
+    TableBlockInfo block6 =
+        new TableBlockInfo("part-6-0-1462341987000", 123, "6", new String[] { "2", "4" }, 111);
+    TableBlockInfo block7 =
+        new TableBlockInfo("part-7-0-1462341987000", 123, "7", new String[] { "3", "4" }, 111);
+    TableBlockInfo block8 =
+        new TableBlockInfo("part-8-0-1462341987000", 123, "8", new String[] { "3", "4" }, 111);
+    TableBlockInfo block9 =
+        new TableBlockInfo("part-9-0-1462341987000", 123, "9", new String[] { "3", "4" }, 111);
+    TableBlockInfo block10 =
+        new TableBlockInfo("part-10-0-1462341987000", 123, "9", new String[] { "1", "4" }, 111);
+
+    inputMap.put(block1, Arrays.asList("2", "4"));
+    inputMap.put(block2, Arrays.asList("2", "4"));
+    inputMap.put(block3, Arrays.asList("2", "4"));
+    inputMap.put(block4, Arrays.asList("2", "4"));
+    inputMap.put(block5, Arrays.asList("2", "4"));
+    inputMap.put(block6, Arrays.asList("2", "4"));
+    inputMap.put(block7, Arrays.asList("3", "4"));
+    inputMap.put(block8, Arrays.asList("3", "4"));
+    inputMap.put(block9, Arrays.asList("3", "4"));
+    inputMap.put(block10, Arrays.asList("1", "4"));
+
+    List<TableBlockInfo> inputBlocks = new ArrayList<TableBlockInfo>(10);
+    inputBlocks.add(block1);
+    inputBlocks.add(block2);
+    inputBlocks.add(block3);
+    inputBlocks.add(block4);
+    inputBlocks.add(block5);
+    inputBlocks.add(block6);
+    inputBlocks.add(block7);
+    inputBlocks.add(block8);
+    inputBlocks.add(block9);
+    inputBlocks.add(block10);
+
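+    // node "4" appears in the replica list of every block; the mapping is still
+    // expected to spread the 10 blocks across all 4 nodes instead of piling them
+    // onto node "4"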
+    Map<String, List<TableBlockInfo>> outputMap = CarbonLoaderUtil.nodeBlockMapping(inputBlocks, 4);
+
+    Assert.assertTrue(calculateBlockDistribution(inputMap, outputMap, 10, 4));
+
+    Assert.assertTrue(calculateBlockLocality(inputMap, outputMap, 10, 4));
+  }
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark-common-test/src/test/java/org/carbondata/integration/spark/testsuite/validation/FileFooterValidator.java
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/java/org/carbondata/integration/spark/testsuite/validation/FileFooterValidator.java b/integration/spark-common-test/src/test/java/org/carbondata/integration/spark/testsuite/validation/FileFooterValidator.java
new file mode 100644
index 0000000..76906c6
--- /dev/null
+++ b/integration/spark-common-test/src/test/java/org/carbondata/integration/spark/testsuite/validation/FileFooterValidator.java
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.carbondata.integration.spark.testsuite.validation;
+
+import org.apache.spark.sql.common.util.CarbonHiveContext;
+import org.apache.carbondata.core.carbon.CarbonTableIdentifier;
+import org.apache.carbondata.core.carbon.path.CarbonStorePath;
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.datastorage.store.FileHolder;
+import org.apache.carbondata.core.datastorage.store.filesystem.CarbonFile;
+import org.apache.carbondata.core.datastorage.store.filesystem.CarbonFileFilter;
+import org.apache.carbondata.core.datastorage.store.impl.FileFactory;
+import org.apache.carbondata.core.reader.CarbonFooterReader;
+import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.carbondata.format.BlockletIndex;
+import org.apache.carbondata.format.BlockletInfo;
+import org.apache.carbondata.format.DataChunk;
+import org.apache.carbondata.format.Encoding;
+import org.apache.carbondata.format.FileFooter;
+import org.junit.AfterClass;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.junit.Assert.assertTrue;
+
+public class FileFooterValidator {
+
+  private static FileFooter fileFooter;
+
+  private static boolean setUpIsDone;
+
+  @Before public void setUp() throws Exception {
+
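+    // JUnit creates a new instance of this class per test method, so a static
+    // flag is used to run the one-time cube creation and data load only once.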
+    if (setUpIsDone) {
+      return;
+    }
+    CarbonHiveContext.sql(
+            "CREATE CUBE validatefooter DIMENSIONS (empno Integer, empname String,"
+            + " designation String,"
+            + " doj Timestamp, workgroupcategory Integer, workgroupcategoryname String, "
+            + "deptno Integer, deptname String, projectcode Integer, projectjoindate Timestamp,"
+            + " projectenddate Timestamp) MEASURES (attendance Integer,utilization Integer,"
+            + "salary Integer) OPTIONS (PARTITIONER [PARTITION_COUNT=1])");
+    CarbonHiveContext.sql(
+            "LOAD DATA fact from './src/test/resources/data.csv' INTO CUBE validatefooter "
+                + "PARTITIONDATA(DELIMITER ',', QUOTECHAR '\"')");
+    String storePath =
+        CarbonProperties.getInstance().getProperty(CarbonCommonConstants.STORE_LOCATION);
+    CarbonTableIdentifier tableIdentifier =
+            new CarbonTableIdentifier(CarbonCommonConstants.DATABASE_DEFAULT_NAME, "validatefooter", "1");
+    String segmentPath = CarbonStorePath.getCarbonTablePath(storePath, tableIdentifier)
+        .getCarbonDataDirectoryPath("0", "0");
+    CarbonFile carbonFile =
+        FileFactory.getCarbonFile(segmentPath, FileFactory.getFileType(segmentPath));
+    CarbonFile[] list = carbonFile.listFiles(new CarbonFileFilter() {
+      @Override public boolean accept(CarbonFile file) {
+        return file.getName().endsWith(CarbonCommonConstants.FACT_FILE_EXT);
+      }
+    });
+
+    for (CarbonFile file : list) {
+      String fileLocation = file.getAbsolutePath();
+      CarbonFile factFile =
+          FileFactory.getCarbonFile(fileLocation, FileFactory.getFileType(fileLocation));
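+      // the last LONG_SIZE_IN_BYTE bytes of a fact file hold the footer offset:
+      // seek there, read the offset, then deserialize the footer from that position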
+      long offset = factFile.getSize() - CarbonCommonConstants.LONG_SIZE_IN_BYTE;
+      FileHolder fileHolder = FileFactory.getFileHolder(FileFactory.getFileType(fileLocation));
+      offset = fileHolder.readLong(fileLocation, offset);
+      CarbonFooterReader metaDataReader = new CarbonFooterReader(fileLocation, offset);
+      fileFooter = metaDataReader.readFooter();
+    }
+    setUpIsDone = true;
+  }
+
+  @AfterClass public static void tearDownAfterClass() {
+    CarbonHiveContext.sql("drop CUBE validatefooter");
+  }
+
+  @Test public void testFileFooterExist() {
+    assertTrue(fileFooter != null);
+  }
+
+  @Test public void testFileFooterVersion() {
+    assertTrue(fileFooter.getVersion() >= 0);
+  }
+
+  @Test public void testFileFooterNumRows() {
+    assertTrue(fileFooter.getNum_rows() > 0);
+  }
+
+  @Test public void testFileFooterTableColumns() {
+    assertTrue(fileFooter.getTable_columns() != null && fileFooter.getTable_columns().size() > 0);
+  }
+
+  @Test public void testFileFooterSegmentInfo() {
+    assertTrue(
+        fileFooter.getSegment_info() != null && fileFooter.getSegment_info().getNum_cols() > 0
+            && fileFooter.getSegment_info().getColumn_cardinalities().size() > 0);
+  }
+
+  @Test public void testFileFooterBlockletIndex() {
+    assertTrue(fileFooter.getBlocklet_index_list() != null
+        && fileFooter.getBlocklet_index_list().size() > 0);
+    for (BlockletIndex blockletIndex : fileFooter.getBlocklet_index_list()) {
+      assertTrue(blockletIndex.getMin_max_index().getMin_values() != null
+          && blockletIndex.getMin_max_index().getMin_values().size() > 0
+          && blockletIndex.getMin_max_index().getMax_values() != null
+          && blockletIndex.getMin_max_index().getMax_values().size() > 0
+          && blockletIndex.getMin_max_index().getMin_values().size() == blockletIndex
+          .getMin_max_index().getMax_values().size());
+      assertTrue(blockletIndex.getB_tree_index().getStart_key() != null
+          && blockletIndex.getB_tree_index().getEnd_key() != null);
+    }
+  }
+
+  @Test public void testFileFooterBlockletInfo() {
+    assertTrue(fileFooter.getBlocklet_info_list() != null
+        && fileFooter.getBlocklet_info_list().size() > 0);
+    for (BlockletInfo blockletInfo : fileFooter.getBlocklet_info_list()) {
+      assertTrue(blockletInfo.getNum_rows() > 0 && blockletInfo.getColumn_data_chunks() != null
+          && blockletInfo.getColumn_data_chunks().size() > 0);
+      for (DataChunk columnDataChunk : blockletInfo.getColumn_data_chunks()) {
+        testColumnDataChunk(columnDataChunk);
+      }
+    }
+  }
+
+  private void testColumnDataChunk(DataChunk columnDatachunk) {
+    assertTrue(columnDatachunk.getEncoders() != null && columnDatachunk.getChunk_meta() != null
+        && columnDatachunk.getChunk_meta().getCompression_codec() != null);
+    // For Measure
+    if (columnDatachunk.getEncoders().contains(Encoding.DELTA)) {
+      assertTrue(
+          columnDatachunk.getPresence() != null && columnDatachunk.getEncoder_meta() != null);
+    } else {
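+      // For Dimension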
+      assertTrue(columnDatachunk.getSort_state() != null);
+    }
+  }
+}
\ No newline at end of file

