carbondata-commits mailing list archives

From jack...@apache.org
Subject [carbondata] branch master updated: [CARBONDATA-3503] Optimize Carbon SparkExtensions
Date Sun, 19 Jan 2020 16:58:59 GMT
This is an automated email from the ASF dual-hosted git repository.

jackylk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new f8a1573  [CARBONDATA-3503] Optimize Carbon SparkExtensions
f8a1573 is described below

commit f8a157327c4769ad586b8efd026f0e1a943bcf1e
Author: QiangCai <qiangcai@qq.com>
AuthorDate: Sat Jan 11 18:12:01 2020 +0800

    [CARBONDATA-3503] Optimize Carbon SparkExtensions
    
    Why is this PR needed?
    
    The current Carbon Spark extension has the following limitations:
    1. MV (materialized view) is not supported
    2. the parser still uses CarbonSqlAstBuilder
    3. some test cases still run with CarbonSession
    
    What changes were proposed in this PR?
    
    Enhance the Carbon Spark extension as follows:
    1. new parser order (CarbonParser -> SparkParser)
    CarbonParser parses the SQL first and matches only Carbon-specific SQL;
    anything else falls through to the Spark parser, as sketched below.
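    A minimal sketch of this delegation pattern (the real classes added by this
    PR are CarbonExtensionSqlParser and CarbonExtensionSpark2SqlParser; the class
    name and wiring below are illustrative only):

        import org.apache.spark.sql.catalyst.parser.ParserInterface
        import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan

        // Try Carbon's grammar first; fall back to the Spark parser on failure.
        class CarbonFirstParser(carbon: ParserInterface, spark: ParserInterface)
          extends ParserInterface {

          override def parsePlan(sqlText: String): LogicalPlan =
            try {
              carbon.parsePlan(sqlText)   // matches only Carbon-specific SQL
            } catch {
              case _: Exception => spark.parsePlan(sqlText)   // everything else
            }

          // The remaining ParserInterface methods delegate straight to Spark.
          override def parseExpression(sqlText: String) = spark.parseExpression(sqlText)
          override def parseTableIdentifier(sqlText: String) = spark.parseTableIdentifier(sqlText)
          override def parseFunctionIdentifier(sqlText: String) = spark.parseFunctionIdentifier(sqlText)
          override def parseTableSchema(sqlText: String) = spark.parseTableSchema(sqlText)
          override def parseDataType(sqlText: String) = spark.parseDataType(sqlText)
        }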
    
    CreateTable:
    1. CarbonExtensions only supports "stored as carbondata" and "using carbondata"
    (both forms are shown in the sketch below)
    2. use CarbonCreateDataSourceTableCommand instead of CarbonCreateTableCommand,
    so there is no hard-coded SQL to create a data source table
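
    For illustration, assuming an existing SparkSession `spark` configured with
    CarbonExtensions, these are the two create-table forms the extension accepts
    (table and column names are made up):

        spark.sql("CREATE TABLE t_stored (id INT, name STRING) STORED AS carbondata")
        spark.sql("CREATE TABLE t_using (id INT, name STRING) USING carbondata")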
    
    DataMap:
    1. support MV
    
    TestCases:
    1. remove the spark-carbon-common-test module and move its tests back to the spark-common-test module
    2. all test cases now run using a SparkSession with the Carbon extension
    
    Example:
    1. only CarbonSessionExample uses CarbonSession; all other examples use a SparkSession with the Carbon extension, as sketched below
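
    A minimal sketch of such a session (the master and appName values are
    illustrative):

        import org.apache.spark.sql.SparkSession

        val spark = SparkSession.builder()
          .master("local[*]")
          .appName("CarbonExtensionsExample")
          .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
          .getOrCreate()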
    
    Others:
    1. support a custom lock interface
    2. support a database location provider interface (the configuration for both is sketched below)
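
    A sketch of wiring both extension points through CarbonProperties, using the
    property keys introduced in this commit (the com.example class names are
    hypothetical):

        import org.apache.carbondata.core.util.CarbonProperties

        val props = CarbonProperties.getInstance()
        props.addProperty("carbon.lock.type", "CUSTOM")
        props.addProperty("carbon.lock.class", "com.example.MyCarbonLock")
        props.addProperty("org.apache.carbondata.database.location.provider",
          "com.example.MyDatabaseLocationProvider")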
    
    Does this PR introduce any user interface change?
    Yes. (please explain the change and update document)
    
    Is any new testcase added?
    Yes
    
    This closes #3574
---
 .../core/constants/CarbonCommonConstants.java      |   24 +
 .../core/datamap/DataMapStoreManager.java          |   18 +-
 .../core/datamap/status/DataMapStatusManager.java  |   15 +-
 .../status/DatabaseDataMapStatusProvider.java      |   49 +
 .../carbondata/core/locks/CarbonLockFactory.java   |   63 +-
 .../carbondata/core/locks/ZooKeeperLocking.java    |    2 +-
 .../core/metadata/CarbonTableIdentifier.java       |    4 +-
 .../core/metadata/DatabaseLocationProvider.java    |   64 ++
 .../core/metadata/datatype/DataTypes.java          |    2 +-
 .../core/metadata/schema/table/CarbonTable.java    |    9 +-
 .../metadata/schema/table/CarbonTableBuilder.java  |    2 +-
 .../schema/table/DataMapSchemaFactory.java         |   15 +
 .../table/DatabaseDMSchemaStorageProvider.java     |   59 ++
 .../core/scan/expression/ExpressionResult.java     |   51 +
 .../expression/conditional/EqualToExpression.java  |    4 +-
 .../conditional/GreaterThanEqualToExpression.java  |    4 +-
 .../conditional/GreaterThanExpression.java         |    4 +-
 .../scan/expression/conditional/InExpression.java  |    4 +-
 .../conditional/LessThanEqualToExpression.java     |    4 +-
 .../expression/conditional/LessThanExpression.java |    4 +-
 .../conditional/NotEqualsExpression.java           |    4 +-
 .../expression/conditional/NotInExpression.java    |    4 +-
 .../core/statusmanager/SegmentStatusManager.java   |   53 +
 .../carbondata/core/util/CarbonProperties.java     |   38 +
 .../core/util/ThreadLocalSessionInfo.java          |   14 +
 .../carbondata/core/util/path/CarbonTablePath.java |    2 +-
 .../core/carbon/CarbonTableIdentifierTest.java     |    2 +-
 .../core/locks/CarbonLockFactoryTest.java          |  130 +++
 .../metadata/DatabaseLocationProviderTest.java     |   45 +
 .../conditional/EqualToExpressionUnitTest.java     |    2 +-
 .../GreaterThanEqualToExpressionUnitTest.java      |    4 +-
 .../conditional/GreaterThanExpressionUnitTest.java |    4 +-
 .../LessThanEqualToExpressionUnitTest.java         |    4 +-
 .../conditional/LessThanExpressionUnitTest.java    |    4 +-
 .../conditional/NotEqualsExpressionUnitTest.java   |    4 +-
 .../datamap/examples/MinMaxDataMapSuite.scala      |    4 +-
 .../carbondata/mv/datamap/MVAnalyzerRule.scala     |   36 +-
 .../carbondata/mv/datamap/MVDataMapProvider.scala  |   14 +-
 .../apache/carbondata/mv/datamap/MVHelper.scala    |   13 +-
 .../mv/rewrite/SummaryDatasetCatalog.scala         |    7 +-
 .../carbondata/mv/rewrite/MVCoalesceTestCase.scala |   10 +-
 .../mv/rewrite/MVCountAndCaseTestCase.scala        |    8 +-
 .../carbondata/mv/rewrite/MVCreateTestCase.scala   |  360 +++----
 .../mv/rewrite/MVExceptionTestCase.scala           |    6 +-
 .../mv/rewrite/MVFilterAndJoinTest.scala           |   14 +-
 .../mv/rewrite/MVIncrementalLoadingTestcase.scala  |  116 +--
 .../carbondata/mv/rewrite/MVInvalidTestCase.scala  |    8 +-
 .../mv/rewrite/MVMultiJoinTestCase.scala           |   10 +-
 .../carbondata/mv/rewrite/MVRewriteTestCase.scala  |   86 +-
 .../carbondata/mv/rewrite/MVSampleTestCase.scala   |   36 +-
 .../carbondata/mv/rewrite/MVTPCDSTestCase.scala    |   33 +-
 .../carbondata/mv/rewrite/MVTpchTestCase.scala     |   86 +-
 .../mv/rewrite/SelectAllColumnsSuite.scala         |   11 +-
 .../mv/rewrite/TestAllOperationsOnMV.scala         |   80 +-
 .../mv/rewrite/TestPartitionWithMV.scala           |   81 +-
 .../carbondata/mv/rewrite/TestSQLSuite.scala       |    3 +-
 .../carbondata/mv/rewrite/Tpcds_1_4_Suite.scala    |    2 +-
 .../TestMVTimeSeriesCreateDataMapCommand.scala     |   14 +-
 .../timeseries/TestMVTimeSeriesLoadAndQuery.scala  |   15 +-
 .../carbondata/mv/testutil/ModularPlanTest.scala   |    4 +-
 .../carbondata/mv/plans/ModularToSQLSuite.scala    |    6 +-
 .../carbondata/mv/plans/SignatureSuite.scala       |    3 +-
 docs/datamap/mv-datamap-guide.md                   |    2 +-
 docs/ddl-of-carbondata.md                          |   12 +-
 docs/hive-guide.md                                 |    2 +-
 ...277\207\346\273\244\346\235\241\344\273\266.md" |    2 +-
 .../examples/sql/JavaCarbonSessionExample.java     |    5 +-
 .../benchmark/SimpleQueryBenchmark.scala           |    6 +-
 .../carbondata/examples/AlluxioExample.scala       |    5 +-
 .../carbondata/examples/AlterTableExample.scala    |    4 +-
 .../examples/CarbonDataFrameExample.scala          |    2 +-
 .../carbondata/examples/CarbonSessionExample.scala |    4 +-
 .../examples/CarbonSortColumnsExample.scala        |    6 +-
 .../examples/CaseClassDataFrameAPIExample.scala    |    2 +-
 .../examples/CustomCompactionExample.scala         |   21 +-
 .../examples/DataFrameComplexTypeExample.scala     |    6 +-
 .../examples/DataManagementExample.scala           |   21 +-
 .../examples/DataUpdateDeleteExample.scala         |    2 +-
 .../carbondata/examples/DirectSQLExample.scala     |    2 +-
 .../carbondata/examples/ExternalTableExample.scala |   26 +-
 .../carbondata/examples/HadoopFileExample.scala    |    2 +-
 .../apache/carbondata/examples/HiveExample.scala   |   46 +-
 .../carbondata/examples/LuceneDataMapExample.scala |    2 +-
 .../carbondata/examples/MVDataMapExample.scala     |    8 +-
 .../carbondata/examples/QuerySegmentExample.scala  |    4 +-
 .../apache/carbondata/examples/S3CsvExample.scala  |    9 +-
 .../org/apache/carbondata/examples/S3Example.scala |    6 +-
 .../carbondata/examples/S3UsingSDkExample.scala    |    6 +-
 .../carbondata/examples/SparkSessionExample.scala  |  154 ++-
 .../examples/SparkStreamingExample.scala           |    4 +-
 .../examples/StandardPartitionExample.scala        |    8 +-
 .../carbondata/examples/StreamSQLExample.scala     |    2 +-
 .../examples/StreamingUsingBatchLoadExample.scala  |    4 +-
 .../examples/StreamingWithRowParserExample.scala   |    6 +-
 .../examples/StructuredStreamingExample.scala      |    6 +-
 .../TableLevelCompactionOptionExample.scala        |    5 +-
 .../carbondata/examples/util/ExampleUtils.scala    |   31 +-
 .../apache/carbondata/examplesCI/RunExamples.scala |    2 +-
 .../carbondata/hive/CarbonFileHiveSerDe.java}      |   19 +-
 .../apache/carbondata/hive/CarbonHiveSerDe.java    |   51 +-
 .../carbondata/hive/CarbonObjectInspector.java     |    9 +-
 .../apache/carbondata/hive/HiveDataTypeUtils.java  |  109 ++
 integration/spark-carbon-common-test/pom.xml       |  438 --------
 .../spark/util/CarbonSparkQueryTest.scala          |   50 -
 .../cluster/sdv/generated/AlterTableTestCase.scala |  160 +--
 .../cluster/sdv/generated/BadRecordTestCase.scala  |   30 +-
 .../sdv/generated/BloomFilterDataMapTestCase.scala |    8 +-
 .../cluster/sdv/generated/BucketingTestCase.scala  |   14 +-
 .../sdv/generated/ComplexDataTypeTestCase.scala    |   36 +-
 .../generated/CreateTableAsSelectTestCase.scala    |   38 +-
 .../CreateTableWithLocalDictionaryTestCase.scala   |  258 ++---
 .../sdv/generated/DataLoadingIUDTestCase.scala     |  490 ++++-----
 .../sdv/generated/DataLoadingTestCase.scala        |  262 ++---
 .../sdv/generated/DataLoadingV3TestCase.scala      |   10 +-
 .../cluster/sdv/generated/GlobalSortTestCase.scala |  100 +-
 .../sdv/generated/InvertedindexTestCase.scala      |   56 +-
 .../LoadTableWithLocalDictionaryTestCase.scala     |   24 +-
 .../cluster/sdv/generated/LuceneTestCase.scala     |   16 +-
 .../cluster/sdv/generated/MergeIndexTestCase.scala |    8 +-
 .../sdv/generated/OffheapQuery1TestCase.scala      |    4 +-
 .../sdv/generated/OffheapQuery2TestCase.scala      |    4 +-
 .../sdv/generated/OffheapSort1TestCase.scala       |   28 +-
 .../sdv/generated/OffheapSort2TestCase.scala       |   28 +-
 .../sdv/generated/PrestoSampleTestCase.scala       |    2 +-
 .../cluster/sdv/generated/QueriesBVATestCase.scala |    4 +-
 .../sdv/generated/QueriesBasicTestCase.scala       |   20 +-
 .../sdv/generated/QueriesCompactionTestCase.scala  |    6 +-
 .../sdv/generated/QueriesNormalTestCase.scala      |    8 +-
 .../sdv/generated/QueriesRangeFilterTestCase.scala |   12 +-
 .../generated/QueriesSparkBlockDistTestCase.scala  |    2 +-
 .../cluster/sdv/generated/SDKwriterTestCase.scala  |   85 +-
 .../sdv/generated/SetParameterTestCase.scala       |   26 +-
 .../cluster/sdv/generated/ShowLoadsTestCase.scala  |    6 +-
 .../generated/SortColumnExcudeDictTestCase.scala   |   62 +-
 .../cluster/sdv/generated/SortColumnTestCase.scala |   68 +-
 .../sdv/generated/StandardPartitionTestCase.scala  |   70 +-
 .../generated/TableCommentAlterTableTestCase.scala |   26 +-
 .../generated/TestPartitionWithGlobalSort.scala    |   46 +-
 .../sdv/generated/TimestamptypesTestCase.scala     |    8 +-
 .../sdv/generated/V3offheapvectorTestCase.scala    |    8 +-
 .../cluster/sdv/generated/Vector1TestCase.scala    |    6 +-
 .../cluster/sdv/generated/Vector2TestCase.scala    |    6 +-
 .../sdv/register/TestRegisterCarbonTable.scala     |   14 +-
 .../lucene/LuceneCoarseGrainDataMapSuite.scala     |    4 +-
 .../lucene/LuceneFineGrainDataMapSuite.scala       |   69 +-
 .../aggquery/IntegerDataTypeTestCase.scala         |   10 +-
 .../spark/testsuite/bigdecimal/TestBigInt.scala    |    4 +-
 .../TestDimensionWithDecimalDataType.scala         |    4 +-
 .../testsuite/binary/TestBinaryDataType.scala      |   66 +-
 .../complexType/TestAdaptiveComplexType.scala      |   94 +-
 .../TestAdaptiveEncodingForNullValues.scala        |   26 +-
 .../complexType/TestAllComplexDataType.scala       |    4 +-
 .../complexType/TestCompactionComplexType.scala    |   96 +-
 .../complexType/TestComplexDataType.scala          |  197 ++--
 .../complexType/TestComplexTypeQuery.scala         |   23 +-
 .../complexType/TestComplexTypeWithBigArray.scala  |    4 +-
 .../complexType/TestCreateTableWithDouble.scala    |    4 +-
 .../dataload/MultiFilesDataLoagdingTestCase.scala  |    2 +-
 .../testsuite/dataload/TestLoadDataGeneral.scala   |   16 +-
 .../dataload/TestLoadDataWithAutoLoadMerge.scala   |    2 +-
 .../dataload/TestLoadDataWithBlankLine.scala       |    4 +-
 .../dataload/TestLoadDataWithCompression.scala     |    2 +-
 .../TestLoadDataWithEmptyArrayColumns.scala        |    2 +-
 .../dataload/TestLoadDataWithJunkChars.scala       |    2 +-
 .../dataload/TestLoadDataWithMaxMinBigInt.scala    |    6 +-
 .../dataload/TestLoadDataWithMaxMinInteger.scala   |    6 +-
 .../dataload/TestLoadDataWithNullMeasures.scala    |    2 +-
 .../TestLoadDataWithSortColumnBounds.scala         |   18 +-
 .../dataload/TestLoadDataWithYarnLocalDirs.scala   |    2 +-
 .../dataload/TestNoInvertedIndexLoadAndQuery.scala |   24 +-
 .../emptyrow/TestCSVHavingOnlySpaceChar.scala      |    2 +-
 .../spark/testsuite/emptyrow/TestEmptyRows.scala   |    2 +-
 .../testsuite/emptyrow/TestSkipEmptyLines.scala    |   12 +-
 .../primitiveTypes/DoubleDataTypeTestCase.scala    |    2 +-
 .../primitiveTypes/FloatDataTypeTestCase.scala     |    2 +-
 .../TestAdaptiveEncodingForPrimitiveTypes.scala    |   40 +-
 .../testsuite/addsegment/AddSegmentTestCase.scala  |   10 +-
 .../aggquery/AllDataTypesTestCaseAggregate.scala   |    2 +-
 .../testsuite/aggquery/AverageQueryTestCase.scala  |    2 +-
 .../allqueries/AllDataTypesTestCase.scala          |    4 +-
 .../testsuite/allqueries/DoubleDataTypeTest.scala  |   24 +-
 .../allqueries/InsertIntoCarbonTableTestCase.scala |   48 +-
 .../allqueries/MeasureOnlyTableTestCases.scala     |    2 +-
 ...ryWithColumnMetCacheAndCacheLevelProperty.scala |   14 +-
 .../allqueries/TestQueryWithoutDataLoad.scala      |    2 +-
 .../allqueries/TestTableNameHasDbName.scala        |    2 +-
 .../alterTable/TestAlterTableAddColumns.scala      |    2 +-
 ...leWithColumnMetCacheAndCacheLevelProperty.scala |    4 +-
 .../badrecordloger/BadRecordActionTest.scala       |   32 +-
 .../badrecordloger/BadRecordEmptyDataTest.scala    |   12 +-
 .../badrecordloger/BadRecordLoggerTest.scala       |   24 +-
 .../testsuite/bigdecimal/TestAvgForBigInt.scala    |    2 +-
 .../testsuite/bigdecimal/TestBigDecimal.scala      |   12 +-
 .../bigdecimal/TestNullAndEmptyFields.scala        |    3 +-
 .../bigdecimal/TestNullAndEmptyFieldsUnsafe.scala  |    3 +-
 .../blockprune/BlockPruneQueryTestCase.scala       |    2 +-
 .../CarbonCustomBlockDistributionTest.scala        |    2 +-
 .../compaction/TestHybridCompaction.scala          |    2 +-
 .../TestAlterTableWithTableComment.scala           |    6 +-
 ...bonFileInputFormatWithExternalCarbonTable.scala |   33 +-
 .../TestCreateDDLForComplexMapType.scala           |   53 +-
 .../createTable/TestCreateExternalTable.scala      |   23 +-
 .../TestCreateHiveTableWithCarbonDS.scala          |    2 +-
 .../createTable/TestCreateTableAsSelect.scala      |   70 +-
 .../createTable/TestCreateTableIfNotExists.scala   |   17 +-
 .../createTable/TestCreateTableLike.scala          |    8 +-
 .../TestCreateTableWithColumnComment.scala         |    4 +-
 ...leWithColumnMetCacheAndCacheLevelProperty.scala |   36 +-
 .../TestCreateTableWithCompactionOptions.scala     |   12 +-
 ...TestCreateTableWithDatabaseNameCaseChange.scala |    8 +-
 .../createTable/TestCreateTableWithSortScope.scala |   14 +-
 .../TestCreateTableWithSpaceInColumnName.scala     |    4 +-
 .../TestCreateTableWithTableComment.scala          |    4 +-
 .../TestNonTransactionalCarbonTable.scala          |  155 ++-
 .../TestNonTransactionalCarbonTableForBinary.scala |   10 +-
 ...TestNonTransactionalCarbonTableForMapType.scala |   18 +-
 ...TestNonTransactionalCarbonTableJsonWriter.scala |   16 +-
 ...nTransactionalCarbonTableWithAvroDataType.scala |  125 +--
 ...onTransactionalCarbonTableWithComplexType.scala |   10 +-
 .../createTable/TestRenameTableWithDataMap.scala   |    4 +-
 .../CarbonIndexFileMergeTestCase.scala             |   30 +-
 .../CompactionSupportGlobalSortBigFileTest.scala   |    4 +-
 .../CompactionSupportGlobalSortFunctionTest.scala  |    8 +-
 .../CompactionSupportGlobalSortParameterTest.scala |    4 +-
 .../CompactionSupportSpecifiedSegmentsTest.scala   |    2 +-
 .../DataCompactionBlockletBoundryTest.scala        |    4 +-
 .../DataCompactionBoundaryConditionsTest.scala     |    6 +-
 .../DataCompactionCardinalityBoundryTest.scala     |    7 +-
 .../datacompaction/DataCompactionLockTest.scala    |    8 +-
 .../MajorCompactionIgnoreInMinorTest.scala         |    8 +-
 .../MajorCompactionStopsAfterCompaction.scala      |    4 +-
 .../MajorCompactionWithMeasureSortColumns.scala    |    2 +-
 .../TableLevelCompactionOptionTest.scala           |   18 +-
 .../TestDataLoadWithColumnsMoreThanSchema.scala    |   17 +-
 .../dataload/TestDataLoadWithFileName.scala        |    2 +-
 .../TestDataWithDicExcludeAndInclude.scala         |    2 +-
 .../dataload/TestGlobalSortDataLoad.scala          |   24 +-
 .../testsuite/dataload/TestLoadDataFrame.scala     |    6 +-
 .../dataload/TestLoadDataUseAllDictionary.scala    |    2 +-
 .../TestLoadDataWithDiffTimestampFormat.scala      |    2 +-
 .../TestLoadDataWithFileHeaderException.scala      |    2 +-
 .../TestLoadDataWithHiveSyntaxDefaultFormat.scala  |   76 +-
 .../TestLoadDataWithHiveSyntaxUnsafe.scala         |   52 +-
 ...adDataWithMalformedCarbonCommandException.scala |    2 +-
 .../dataload/TestLoadDataWithNoMeasure.scala       |   17 +-
 .../spark/testsuite/dataload/TestLoadOptions.scala |    2 +-
 .../dataload/TestLoadTblNameIsKeyword.scala        |    6 +-
 .../dataload/TestLoadWithSortTempCompressed.scala  |    2 +-
 .../dataload/TestRangeColumnDataLoad.scala         |   54 +-
 .../dataload/TestTableLevelBlockSize.scala         |    8 +-
 .../testsuite/dataload/TestTableLoadMinSize.scala  |    8 +-
 .../testsuite/datamap/CGDataMapTestCase.scala      |   21 +-
 .../testsuite/datamap/DataMapWriterSuite.scala     |    8 +-
 .../testsuite/datamap/FGDataMapTestCase.scala      |   15 +-
 .../testsuite/datamap/TestDataMapCommand.scala     |   22 +-
 .../testsuite/datamap/TestDataMapStatus.scala      |   20 +-
 .../dataretention/DataRetentionTestCase.scala      |    6 +-
 .../dblocation/DBLocationCarbonTableTestCase.scala |   26 +-
 .../deleteTable/TestDeleteTableNewDDL.scala        |   29 +-
 .../describeTable/TestDescribeTable.scala          |   10 +-
 .../detailquery/AllQueriesSpark2TestCase.scala     |    2 +-
 .../testsuite/detailquery/CastColumnTestCase.scala |    4 +-
 .../ColumnPropertyValidationTestCase.scala         |    4 +-
 .../detailquery/ExpressionWithNullTestCase.scala   |    2 +-
 .../HighCardinalityDataTypesTestCase.scala         |    6 +-
 .../detailquery/IntegerDataTypeTestCase.scala      |    2 +-
 .../detailquery/NoDictionaryColumnTestCase.scala   |    4 +-
 .../RangeFilterAllDataTypesTestCases.scala         |   14 +-
 .../detailquery/RangeFilterTestCase.scala          |   12 +-
 .../SubqueryWithFilterAndSortTestCase.scala        |    2 +-
 .../ValueCompressionDataTypeTestCase.scala         |    8 +-
 .../DateDataTypeDirectDictionaryTest.scala         |    2 +-
 ...ataTypeDirectDictionaryWithNoDictTestCase.scala |    2 +-
 ...rectDictionaryWithOffHeapSortDisabledTest.scala |    2 +-
 .../DateDataTypeNullDataTest.scala                 |    2 +-
 ...TimestampDataTypeDirectDictionaryTestCase.scala |    4 +-
 ...ataTypeDirectDictionaryWithNoDictTestCase.scala |    2 +-
 .../TimestampDataTypeNullDataTest.scala            |    2 +-
 .../TimestampNoDictionaryColumnCastTestCase.scala  |    4 +-
 .../TimestampNoDictionaryColumnTestCase.scala      |    2 +-
 .../filterexpr/AllDataTypesTestCaseFilter.scala    |    2 +-
 .../testsuite/filterexpr/CountStarTestCase.scala   |    2 +-
 .../filterexpr/FilterProcessorTestCase.scala       |   24 +-
 .../filterexpr/GrtLtFilterProcessorTestCase.scala  |    8 +-
 .../filterexpr/IntegerDataTypeTestCase.scala       |    2 +-
 .../NullMeasureValueTestCaseFilter.scala           |    2 +-
 .../TestAndEqualFilterEmptyOperandValue.scala      |    5 +-
 .../testsuite/filterexpr/TestBetweenFilter.scala   |    2 +-
 .../testsuite/filterexpr/TestGrtLessFilter.scala   |    5 +-
 .../filterexpr/TestImplicitFilterExpression.scala  |    4 +-
 .../spark/testsuite/filterexpr/TestInFilter.scala  |    2 +-
 .../testsuite/filterexpr/TestIsNullFilter.scala    |    2 +-
 .../testsuite/filterexpr/TestNotNullFilter.scala   |    2 +-
 .../FlatFolderTableLoadingTestCase.scala           |   12 +-
 .../InsertIntoNonCarbonTableTestCase.scala         |    4 +-
 .../testsuite/iud/DeleteCarbonTableTestCase.scala  |   34 +-
 .../iud/HorizontalCompactionTestCase.scala         |   34 +-
 .../iud/TestInsertAndOtherCommandConcurrent.scala  |    7 +-
 .../testsuite/iud/UpdateCarbonTableTestCase.scala  |  116 +--
 .../UpdateCarbonTableTestCaseWithBadRecord.scala   |    4 +-
 .../joinquery/AllDataTypesTestCaseJoin.scala       |   10 +-
 .../joinquery/IntegerDataTypeTestCase.scala        |    2 +-
 .../joinquery/JoinWithoutDictionaryColumn.scala    |    8 +-
 .../testsuite/joinquery/OrderByLimitTestCase.scala |    4 +-
 .../LocalDictionarySupportAlterTableTest.scala     |  116 +--
 .../LocalDictionarySupportCreateTableTest.scala    |  260 ++---
 .../LocalDictionarySupportLoadTableTest.scala      |   26 +-
 .../longstring/VarcharDataTypesBasicTestCase.scala |   28 +-
 .../NullMeasureValueTestCaseAggregate.scala        |    2 +-
 .../TestNullValueSerialization.scala               |    3 +-
 .../testsuite/partition/TestShowPartitions.scala   |    2 +-
 .../partition/TestUpdateForPartitionTable.scala    |    2 +-
 .../testsuite/sortcolumns/TestSortColumns.scala    |   71 +-
 .../sortcolumns/TestSortColumnsWithUnsafe.scala    |   40 +-
 .../sortexpr/AllDataTypesTestCaseSort.scala        |    2 +-
 .../sortexpr/IntegerDataTypeTestCase.scala         |    2 +-
 .../StandardPartitionBadRecordLoggerTest.scala     |   20 +-
 .../StandardPartitionGlobalSortTestCase.scala      |  104 +-
 .../StandardPartitionTableCleanTestCase.scala      |   10 +-
 .../StandardPartitionTableCompactionTestCase.scala |   16 +-
 .../StandardPartitionTableDropTestCase.scala       |   16 +-
 .../StandardPartitionTableLoadingTestCase.scala    |   51 +-
 .../StandardPartitionTableOverwriteTestCase.scala  |   32 +-
 .../StandardPartitionTableQueryTestCase.scala      |   59 +-
 .../windowsexpr/WindowsExprTestCase.scala          |    2 +-
 .../sql/commands/TestCarbonDropCacheCommand.scala  |    9 +-
 .../sql/commands/TestCarbonShowCacheCommand.scala  |   14 +-
 .../command/CarbonTableSchemaCommonSuite.scala     |    4 +-
 .../org/apache/carbondata/api/CarbonStore.scala    |    8 +-
 .../spark/load/DataLoadProcessBuilderOnSpark.scala |   31 +-
 .../spark/load/DataLoadProcessorStepOnSpark.scala  |   29 +-
 .../carbondata/spark/load/GlobalSortHelper.scala   |    4 +-
 .../carbondata/spark/rdd/CarbonScanRDD.scala       |    3 +-
 .../carbondata/spark/util/CarbonScalaUtil.scala    |   13 +-
 .../apache/carbondata/spark/util/CommonUtil.scala  |    2 +-
 .../spark/sql/catalyst/CarbonDDLSqlParser.scala    | 1098 +-------------------
 ...onDDLSqlParser.scala => CarbonParserUtil.scala} |  411 +-------
 .../command/carbonTableSchemaCommon.scala          |   15 +-
 .../apache/spark/sql/test/TestQueryExecutor.scala  |   27 +-
 .../spark/sql/test/util/CarbonQueryTest.scala      |  212 ----
 .../org/apache/spark/sql/test/util/QueryTest.scala |    3 +-
 .../org/apache/spark/sql/util/SparkSQLUtil.scala   |    2 +-
 .../apache/spark/util/CarbonReflectionUtils.scala  |    4 +-
 .../scala/org/apache/spark/util/FileUtils.scala    |    4 +-
 .../apache/spark/sql/profiler/ProfilerSuite.scala  |    8 +-
 .../spark/sql/hive/CarbonOptimizerUtil.scala       |    2 +-
 .../apache/spark/sql/hive/CarbonSessionState.scala |   91 +-
 .../spark/sql/hive/SqlAstBuilderHelper.scala       |   13 +-
 .../carbondata/datamap/IndexDataMapProvider.java   |    2 +-
 .../carbondata/indexserver/IndexServer.scala       |   12 +-
 .../carbondata/spark/util/CarbonSparkUtil.scala    |   30 +-
 .../apache/carbondata/store/SparkCarbonStore.scala |    1 -
 .../apache/spark/sql/CarbonDataFrameWriter.scala   |    8 +-
 .../spark/sql/CarbonDatasourceHadoopRelation.scala |   81 +-
 .../scala/org/apache/spark/sql/CarbonEnv.scala     |   51 +-
 .../org/apache/spark/sql/CarbonExtensions.scala    |  110 +-
 .../scala/org/apache/spark/sql/CarbonSession.scala |    2 +-
 .../scala/org/apache/spark/sql/CarbonSource.scala  |  183 ++--
 .../scala/org/apache/spark/sql/CarbonUtils.scala   |    5 +-
 .../scala/org/apache/spark/sql/EnvHelper.scala     |   39 +-
 .../command/datamap/CarbonDropDataMapCommand.scala |   19 +-
 .../management/CarbonCleanFilesCommand.scala       |    6 +-
 .../CarbonInsertIntoHadoopFsRelationCommand.scala  |   18 +-
 .../command/management/CarbonLoadDataCommand.scala |   69 +-
 .../management/RefreshCarbonTableCommand.scala     |    5 +-
 .../command/mutation/CarbonTruncateCommand.scala   |   56 +
 .../CarbonAlterTableAddHivePartitionCommand.scala  |    2 +-
 .../CarbonAlterTableDropHivePartitionCommand.scala |   11 +-
 ...nAlterTableColRenameDataTypeChangeCommand.scala |   12 +-
 .../schema/CarbonAlterTableDropColumnCommand.scala |    4 +-
 .../schema/CarbonAlterTableRenameCommand.scala     |   12 +-
 .../schema/CarbonAlterTableSetCommand.scala        |    1 -
 .../table/CarbonCreateDataSourceTableCommand.scala |   72 ++
 .../command/table/CarbonCreateTableCommand.scala   |   80 +-
 .../table/CarbonDescribeFormattedCommand.scala     |   15 +-
 .../command/table/CarbonDropTableCommand.scala     |    8 +-
 .../command/table/CarbonExplainCommand.scala       |   35 +-
 .../table/CarbonShowCreateTableCommand.scala       |  119 +++
 .../sql/execution/strategy/CarbonPlanHelper.scala  |  172 +++
 .../spark/sql/execution/strategy/DDLHelper.scala   |  470 +++++++++
 .../spark/sql/execution/strategy/DDLStrategy.scala |  517 +++------
 .../spark/sql/execution/strategy/DMLHelper.scala   |   70 ++
 .../sql/execution/strategy/PushDownHelper.scala    |  123 +++
 .../strategy/StreamingTableStrategy.scala          |   26 +-
 .../spark/sql/hive/CarbonAnalysisRules.scala       |   11 +
 .../spark/sql/hive/CarbonFileMetastore.scala       |   49 +-
 .../org/apache/spark/sql/hive/CarbonMVRules.scala} |   38 +-
 .../spark/sql/hive/cli/CarbonSQLCLIDriver.scala    |   16 +-
 .../execution/command/CarbonHiveCommands.scala     |   10 -
 .../execution/command/CarbonResetCommand.scala}    |   37 +-
 .../apache/spark/sql/optimizer/CarbonFilters.scala |    7 +-
 .../parser/CarbonExtensionSpark2SqlParser.scala    |   77 ++
 .../sql/parser/CarbonExtensionSqlParser.scala      |   72 ++
 .../spark/sql/parser/CarbonSpark2SqlParser.scala   |  173 +--
 .../spark/sql/parser/CarbonSparkSqlParser.scala    |    2 +-
 .../sql/parser/CarbonSparkSqlParserUtil.scala      |  385 ++++++-
 .../sql/test/CarbonSpark2TestQueryExecutor.scala   |   89 --
 .../spark/sql/test/Spark2TestQueryExecutor.scala   |    8 +-
 .../org/apache/spark/util/AlterTableUtil.scala     |   29 +
 .../apache/spark/sql/hive/CarbonOptimizer.scala    |    2 +-
 ...apache.spark.sql.test.TestQueryExecutorRegister |    3 +-
 .../BloomCoarseGrainDataMapFunctionSuite.scala     |   78 +-
 .../bloom/BloomCoarseGrainDataMapSuite.scala       |   44 +-
 .../carbondata/spark/testsuite/TestCarbonCli.scala |    2 +-
 .../InsertIntoCarbonTableSpark2TestCase.scala      |    2 +-
 .../booleantype/BooleanDataTypesBaseTest.scala     |   22 +-
 .../booleantype/BooleanDataTypesBigFileTest.scala  |   20 +-
 .../booleantype/BooleanDataTypesFilterTest.scala   |    8 +-
 .../booleantype/BooleanDataTypesInsertTest.scala   |   48 +-
 .../booleantype/BooleanDataTypesLoadTest.scala     |   26 +-
 .../BooleanDataTypesParameterTest.scala            |   10 +-
 .../booleantype/BooleanDataTypesSortTest.scala     |    8 +-
 .../compress/TestBooleanCompressSuite.scala        |    2 +-
 .../segmentreading/TestSegmentReading.scala        |   18 +-
 .../TestSegmentReadingForMultiThreading.scala      |    2 +-
 .../carbondata/store/SparkCarbonStoreTest.scala    |    2 +-
 .../carbondata/BadRecordPathLoadOptionTest.scala   |    2 +-
 .../spark/carbondata/CarbonDataSourceSuite.scala   |    2 +-
 .../carbondata/DataLoadFailAllTypeSortTest.scala   |   12 +-
 .../carbondata/TestStreamingTableOpName.scala      |   16 +-
 .../carbondata/TestStreamingTableQueryFilter.scala |    2 +-
 .../TestStreamingTableWithLongString.scala         |    4 +-
 .../TestStreamingTableWithRowParser.scala          |    4 +-
 .../bucketing/TableBucketingTestCase.scala         |   14 +-
 .../datatype/NumericDimensionBadRecordTest.scala   |   14 +-
 .../deletetable/DeleteTableTestCase.scala          |    9 +-
 .../iud/DeleteCarbonTableSubqueryTestCase.scala    |    6 +-
 .../carbondata/query/SubQueryJoinTestSuite.scala   |   10 +-
 .../spark/carbondata/query/SubQueryTestSuite.scala |    4 +-
 .../carbondata/query/TestNotEqualToFilter.scala    |    2 +-
 .../register/TestRegisterCarbonTable.scala         |   36 +-
 .../restructure/AlterTableRevertTestCase.scala     |    2 +-
 .../restructure/AlterTableUpgradeSegmentTest.scala |    2 +-
 .../restructure/AlterTableValidationTestCase.scala |   59 +-
 .../vectorreader/AddColumnTestCases.scala          |   66 +-
 .../AlterTableColumnRenameTestCase.scala           |   27 +-
 .../vectorreader/ChangeDataTypeTestCases.scala     |   12 +-
 .../vectorreader/DropColumnTestCases.scala         |    8 +-
 .../vectorreader/VectorReaderTestCase.scala        |    2 +-
 .../apache/spark/sql/CarbonExtensionSuite.scala    |    5 +-
 .../spark/sql/GetDataSizeAndIndexSizeTest.scala    |   22 +-
 .../spark/sql/common/util/Spark2QueryTest.scala    |    5 +-
 .../mutation/CarbonTruncateCommandTest.scala       |   50 +
 .../org/apache/spark/util/CarbonCommandSuite.scala |    7 +-
 pom.xml                                            |    2 -
 .../loading/model/CarbonLoadModelBuilder.java      |    7 +-
 .../processing/loading/model/LoadOption.java       |   14 +-
 .../processing/util/CarbonDataProcessorUtil.java   |    4 +-
 .../carbondata/lcm/locks/ZooKeeperLockingTest.java |    2 +-
 .../carbondata/sdk/file/AvroCarbonWriter.java      |    6 +-
 .../carbondata/sdk/file/CarbonWriterBuilder.java   |    2 +-
 451 files changed, 7246 insertions(+), 7283 deletions(-)

diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index 10b2d89..8c7d7c4 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -192,6 +192,13 @@ public final class CarbonCommonConstants {
   public static final String LOCK_PATH_DEFAULT = "";
 
   /**
+   * Specifies the lock implementation class.
+   * The class should be an implementation of ICarbonLock.
+   */
+  @CarbonProperty
+  public static final String LOCK_CLASS = "carbon.lock.class";
+
+  /**
    * ZOOKEEPER_LOCATION this is the location in zookeeper file system where locks are created.
    * mechanism of carbon
    */
@@ -362,6 +369,15 @@ public final class CarbonCommonConstants {
   @CarbonProperty
   public static final String CARBON_SYSTEM_FOLDER_LOCATION = "carbon.system.folder.location";
 
+  @CarbonProperty
+  public static final String CARBON_DATAMAP_SCHEMA_STORAGE = "carbon.datamap.schema.storage";
+
+  public static final String CARBON_DATAMAP_SCHEMA_STORAGE_DEFAULT = "DISK";
+
+  public static final String CARBON_DATAMAP_SCHEMA_STORAGE_DISK = "DISK";
+
+  public static final String CARBON_DATAMAP_SCHEMA_STORAGE_DATABASE = "DATABASE";
+
   /**
    * It is internal configuration and used only for test purpose.
    * It will merge the carbon index files with in the segment to single segment.
@@ -1898,6 +1914,11 @@ public final class CarbonCommonConstants {
   public static final long CARBON_256MB = 256 * 1024 * 1024;
 
   /**
+   * CUSTOM TYPE
+   */
+  public static final String CARBON_LOCK_TYPE_CUSTOM = "CUSTOM";
+
+  /**
    * ZOOKEEPERLOCK TYPE
    */
   public static final String CARBON_LOCK_TYPE_ZOOKEEPER = "ZOOKEEPERLOCK";
@@ -2288,6 +2309,9 @@ public final class CarbonCommonConstants {
    */
   public static final int MAXIMUM_CHAR_LENGTH = 128;
 
+  public static final String DATABASE_LOCATION_PROVIDER =
+      "org.apache.carbondata.database.location.provider";
+
   /**
    * Carbon property for timeseries MV to define the first day of week
    */
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
index 7c3ce5f..6bd4027 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/DataMapStoreManager.java
@@ -40,8 +40,8 @@ import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
 import org.apache.carbondata.core.metadata.CarbonMetadata;
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
 import org.apache.carbondata.core.metadata.schema.table.DataMapSchema;
+import org.apache.carbondata.core.metadata.schema.table.DataMapSchemaFactory;
 import org.apache.carbondata.core.metadata.schema.table.DataMapSchemaStorageProvider;
-import org.apache.carbondata.core.metadata.schema.table.DiskBasedDMSchemaStorageProvider;
 import org.apache.carbondata.core.metadata.schema.table.RelationIdentifier;
 import org.apache.carbondata.core.mutate.SegmentUpdateDetails;
 import org.apache.carbondata.core.mutate.UpdateVO;
@@ -87,8 +87,8 @@ public final class DataMapStoreManager {
 
   private Map<String, TableSegmentRefresher> segmentRefreshMap = new ConcurrentHashMap<>();
 
-  private DataMapSchemaStorageProvider provider = new DiskBasedDMSchemaStorageProvider(
-      CarbonProperties.getInstance().getSystemFolderLocation());
+  private DataMapSchemaStorageProvider provider =
+      DataMapSchemaFactory.getDataMapSchemaStorageProvider();
 
   private static final Logger LOGGER =
       LogServiceFactory.getLogService(DataMapStoreManager.class.getName());
@@ -603,21 +603,21 @@ public final class DataMapStoreManager {
     tablePathMap.remove(tableId);
   }
 
+
+
+
   /**
    * Clear the datamap/datamaps of a table from memory and disk
-   *
-   * @param identifier Table identifier
    */
-  public void deleteDataMap(AbsoluteTableIdentifier identifier, String dataMapName) {
-    CarbonTable carbonTable = getCarbonTable(identifier);
+  public void deleteDataMap(CarbonTable carbonTable, String dataMapName) {
     if (carbonTable == null) {
       // If carbon table is null then it means table is already deleted, therefore return without
       // doing any further changes.
       return;
     }
-    String tableId = identifier.getCarbonTableIdentifier().getTableId();
+    String tableId = carbonTable.getTableId();
     if (CarbonProperties.getInstance()
-        .isDistributedPruningEnabled(identifier.getDatabaseName(), identifier.getTableName())) {
+        .isDistributedPruningEnabled(carbonTable.getDatabaseName(), carbonTable.getTableName())) {
       try {
         DataMapUtil
             .executeClearDataMapJob(carbonTable, DataMapUtil.DISTRIBUTED_JOB_NAME, dataMapName);
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/status/DataMapStatusManager.java b/core/src/main/java/org/apache/carbondata/core/datamap/status/DataMapStatusManager.java
index f10b315..d8b935f 100644
--- a/core/src/main/java/org/apache/carbondata/core/datamap/status/DataMapStatusManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/status/DataMapStatusManager.java
@@ -25,6 +25,7 @@ import java.util.Map;
 
 import org.apache.carbondata.common.exceptions.sql.NoSuchDataMapException;
 import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datamap.DataMapStoreManager;
 import org.apache.carbondata.core.locks.ICarbonLock;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
@@ -34,6 +35,7 @@ import org.apache.carbondata.core.metadata.schema.table.RelationIdentifier;
 import org.apache.carbondata.core.statusmanager.LoadMetadataDetails;
 import org.apache.carbondata.core.statusmanager.SegmentStatus;
 import org.apache.carbondata.core.statusmanager.SegmentStatusManager;
+import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 
 import org.apache.log4j.Logger;
@@ -56,7 +58,7 @@ public class DataMapStatusManager {
    * TODO Use factory when we have more storage providers
    */
   private static DataMapStatusStorageProvider storageProvider =
-      new DiskBasedDataMapStatusProvider();
+      getDataMapStatusStorageProvider();
 
   /**
    * Reads all datamap status file
@@ -191,4 +193,15 @@ public class DataMapStatusManager {
       }
     }
   }
+
+  public static DataMapStatusStorageProvider getDataMapStatusStorageProvider() {
+    String providerProperties = CarbonProperties.getDataMapStorageProvider();
+    switch (providerProperties) {
+      case CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE_DATABASE:
+        return new DatabaseDataMapStatusProvider();
+      case CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE_DISK:
+      default:
+        return new DiskBasedDataMapStatusProvider();
+    }
+  }
 }
diff --git a/core/src/main/java/org/apache/carbondata/core/datamap/status/DatabaseDataMapStatusProvider.java b/core/src/main/java/org/apache/carbondata/core/datamap/status/DatabaseDataMapStatusProvider.java
new file mode 100644
index 0000000..838eb1d
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/datamap/status/DatabaseDataMapStatusProvider.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.datamap.status;
+
+import java.util.List;
+
+import org.apache.carbondata.core.metadata.schema.table.DataMapSchema;
+
+/**
+ * It saves/serializes the array of {@link DataMapStatusDetail} to the database folder.
+ * It ensures data consistency during concurrent writes through a write lock. It saves the status
+ * to the datamapstatus file under the database folder.
+ * The implementation is not finished yet; currently it is used to disable DataMap in multi-tenant scenarios.
+ */
+public class DatabaseDataMapStatusProvider implements DataMapStatusStorageProvider {
+
+  @Override
+  public DataMapStatusDetail[] getDataMapStatusDetails() {
+    return new DataMapStatusDetail[0];
+  }
+
+  /**
+   * Update or add the status of the passed datamaps with the given datamapstatus. If the given
+   * status is enabled/disabled, the datamap is updated/added; in case of drop it is simply
+   * removed from the file.
+   * This method always overwrites the old file.
+   * @param dataMapSchemas schemas that need to be updated in the datamap status
+   * @param dataMapStatus  status to be updated for the datamap schemas
+   */
+  @Override
+  public void updateDataMapStatus(
+      List<DataMapSchema> dataMapSchemas, DataMapStatus dataMapStatus) {
+  }
+}
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockFactory.java b/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockFactory.java
index 187fc71..2330494 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/CarbonLockFactory.java
@@ -17,6 +17,10 @@
 
 package org.apache.carbondata.core.locks;
 
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Modifier;
+
 import org.apache.carbondata.common.logging.LogServiceFactory;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
@@ -45,6 +49,8 @@ public class CarbonLockFactory {
       .getProperty(CarbonCommonConstants.LOCK_PATH, CarbonCommonConstants.LOCK_PATH_DEFAULT)
       .toLowerCase();
 
+  private static Constructor lockConstructor;
+
   static {
     CarbonLockFactory.getLockTypeConfigured();
   }
@@ -66,7 +72,9 @@ public class CarbonLockFactory {
           getLockpath(absoluteTableIdentifier.getCarbonTableIdentifier().getTableId());
     }
     FileFactory.FileType fileType = FileFactory.getFileType(absoluteLockPath);
-    if (lockTypeConfigured.equals(CarbonCommonConstants.CARBON_LOCK_TYPE_ZOOKEEPER)) {
+    if (lockTypeConfigured.equals(CarbonCommonConstants.CARBON_LOCK_TYPE_CUSTOM)) {
+      return newCustomLock(absoluteLockPath, lockFile);
+    } else if (lockTypeConfigured.equals(CarbonCommonConstants.CARBON_LOCK_TYPE_ZOOKEEPER)) {
       return new ZooKeeperLocking(absoluteLockPath, lockFile);
     } else if (fileType == FileFactory.FileType.S3) {
       lockTypeConfigured = CarbonCommonConstants.CARBON_LOCK_TYPE_S3;
@@ -98,6 +106,8 @@ public class CarbonLockFactory {
       lockFileLocation = getLockpath("1");
     }
     switch (lockTypeConfigured) {
+      case CarbonCommonConstants.CARBON_LOCK_TYPE_CUSTOM:
+        return newCustomLock(lockFileLocation, lockFile);
       case CarbonCommonConstants.CARBON_LOCK_TYPE_LOCAL:
         return new LocalFileLock(lockFileLocation, lockFile);
       case CarbonCommonConstants.CARBON_LOCK_TYPE_ZOOKEEPER:
@@ -121,10 +131,61 @@ public class CarbonLockFactory {
         .getProperty(CarbonCommonConstants.LOCK_TYPE, CarbonCommonConstants.LOCK_TYPE_DEFAULT)
         .toUpperCase();
     LOGGER.info("Configured lock type is: " + lockTypeConfigured);
+    String lockClassName =
+        CarbonProperties.getInstance().getProperty(CarbonCommonConstants.LOCK_CLASS);
+    if (lockClassName == null) {
+      return;
+    }
+    CarbonLockFactory.lockConstructor = getCustomLockConstructor(lockClassName);
   }
 
   public static String getLockpath(String tableId) {
     return lockPath + CarbonCommonConstants.FILE_SEPARATOR + tableId;
   }
 
+  private static ICarbonLock newCustomLock(final String lockFileLocation, final String lockFile) {
+    if (lockConstructor == null) {
+      throw new IllegalArgumentException(
+          "Carbon property [" + CarbonCommonConstants.LOCK_CLASS + "] is not set.");
+    }
+    try {
+      return (ICarbonLock) lockConstructor.newInstance(lockFileLocation, lockFile);
+    } catch (InstantiationException | IllegalAccessException | InvocationTargetException e) {
+      throw new RuntimeException(e);
+    }
+  }
+
+  private static Constructor<?> getCustomLockConstructor(final String lockClassName) {
+    final Class<?> lockClass;
+    try {
+      lockClass = CarbonLockFactory.class.getClassLoader().loadClass(lockClassName);
+    } catch (ClassNotFoundException e) {
+      throw new IllegalArgumentException("The class [" + lockClassName + "] is not found.");
+    }
+    if (!ICarbonLock.class.isAssignableFrom(lockClass)) {
+      throw new IllegalArgumentException(
+          "The class [" + lockClassName + "] is not an ICarbonLock class.");
+    }
+    if (Modifier.isAbstract(lockClass.getModifiers())) {
+      throw new IllegalArgumentException(
+          "The class [" + lockClassName + "] can not be initialized.");
+    }
+    if (!Modifier.isPublic(lockClass.getModifiers())) {
+      throw new IllegalArgumentException(
+          "The class [" + lockClassName + "] is not a public class.");
+    }
+    final Constructor<?> lockConstructor;
+    try {
+      lockConstructor = lockClass.getConstructor(String.class, String.class);
+    } catch (NoSuchMethodException e) {
+      throw new IllegalArgumentException(
+          "The class [" + lockClassName + "] do not have the constructor(String, String).", e
+      );
+    }
+    if (!Modifier.isPublic(lockConstructor.getModifiers())) {
+      throw new IllegalArgumentException(
+          "The constructor [" + lockConstructor + "] is not a public constructor.");
+    }
+    return lockConstructor;
+  }
 }
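
For illustration, a hypothetical custom lock satisfying the reflective checks
above: a public, non-abstract ICarbonLock implementation with a public
(String, String) constructor. The method set assumed here is ICarbonLock's as
of this commit, and no real locking is performed:

    import org.apache.carbondata.core.locks.ICarbonLock

    // com.example.MyCarbonLock, registered via carbon.lock.class.
    class MyCarbonLock(lockFileLocation: String, lockFile: String) extends ICarbonLock {
      override def lockWithRetries(): Boolean = true
      override def lockWithRetries(retryCount: Int, retryInterval: Int): Boolean = true
      override def unlock(): Boolean = true
      override def releaseLockManually(lockFile: String): Boolean = true
    }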
diff --git a/core/src/main/java/org/apache/carbondata/core/locks/ZooKeeperLocking.java b/core/src/main/java/org/apache/carbondata/core/locks/ZooKeeperLocking.java
index 6d35bcf..0dc8bbf 100644
--- a/core/src/main/java/org/apache/carbondata/core/locks/ZooKeeperLocking.java
+++ b/core/src/main/java/org/apache/carbondata/core/locks/ZooKeeperLocking.java
@@ -181,7 +181,7 @@ public class ZooKeeperLocking extends AbstractCarbonLock {
   public boolean unlock() {
     try {
      // exists will return null if the path doesn't exist.
-      if (null != zk.exists(lockPath, true)) {
+      if (lockPath != null && null != zk.exists(lockPath, true)) {
         zk.delete(lockPath, -1);
         lockPath = null;
       }
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/CarbonTableIdentifier.java b/core/src/main/java/org/apache/carbondata/core/metadata/CarbonTableIdentifier.java
index 6e6c6b6..ca8b31e 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/CarbonTableIdentifier.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/CarbonTableIdentifier.java
@@ -20,6 +20,8 @@ package org.apache.carbondata.core.metadata;
 import java.io.File;
 import java.io.Serializable;
 
+import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
+
 /**
  * Identifier class which will hold the table qualified name
  */
@@ -53,7 +55,7 @@ public class CarbonTableIdentifier implements Serializable {
     this.databaseName = databaseName;
     this.tableName = tableName;
     this.tableId = tableId;
-    tableUniqueName = databaseName + '_' + tableName;
+    tableUniqueName = CarbonTable.buildUniqueName(databaseName, tableName);
   }
 
   /**
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/DatabaseLocationProvider.java b/core/src/main/java/org/apache/carbondata/core/metadata/DatabaseLocationProvider.java
new file mode 100644
index 0000000..55f10eb
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/DatabaseLocationProvider.java
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.metadata;
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.util.CarbonProperties;
+
+/**
+ * Supports converting a database name to a session-related name.
+ */
+public abstract class DatabaseLocationProvider {
+
+  private static final DatabaseLocationProvider PROVIDER;
+
+  static {
+    final String providerClassName = CarbonProperties.getInstance()
+        .getProperty(CarbonCommonConstants.DATABASE_LOCATION_PROVIDER);
+    final DatabaseLocationProvider provider;
+    if (providerClassName == null) {
+      provider = null;
+    } else {
+      try {
+        final Class providerClass =
+            DatabaseLocationProvider.class.getClassLoader().loadClass(providerClassName);
+        provider = (DatabaseLocationProvider) providerClass.newInstance();
+      } catch (ClassNotFoundException | InstantiationException | IllegalAccessException exception) {
+        throw new RuntimeException(
+            "Fail to construct database location provider[" + providerClassName + "].", exception);
+      }
+    }
+    PROVIDER = provider;
+  }
+
+  public static DatabaseLocationProvider get() {
+    return PROVIDER == null ? Default.INSTANCE : PROVIDER;
+  }
+
+  public abstract String provide(String originalDatabaseName);
+
+  private static final class Default extends DatabaseLocationProvider {
+
+    static final Default INSTANCE = new Default();
+
+    @Override
+    public String provide(final String originalDatabaseName) {
+      return originalDatabaseName;
+    }
+  }
+}
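
A minimal sketch of a custom provider (the tenant prefix is a made-up example;
the class needs a public no-arg constructor because the factory instantiates it
via newInstance()):

    import org.apache.carbondata.core.metadata.DatabaseLocationProvider

    // Registered via the org.apache.carbondata.database.location.provider property.
    class TenantDatabaseLocationProvider extends DatabaseLocationProvider {
      override def provide(originalDatabaseName: String): String =
        "tenant_a_" + originalDatabaseName
    }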
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DataTypes.java b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DataTypes.java
index d4ad8d0..e79d0dc 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DataTypes.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/datatype/DataTypes.java
@@ -62,7 +62,7 @@ public class DataTypes {
   static final int BYTE_ARRAY_TYPE_ID = 15;
   static final int SHORT_INT_TYPE_ID = 16;
   static final int LEGACY_LONG_TYPE_ID = 17;
-  static final int DECIMAL_TYPE_ID = 10;
+  public static final int DECIMAL_TYPE_ID = 10;
   public static final int ARRAY_TYPE_ID = 11;
   public static final int STRUCT_TYPE_ID = 12;
   public static final int MAP_TYPE_ID = 13;
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
index c35d3e0..88271c4 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTable.java
@@ -42,6 +42,7 @@ import org.apache.carbondata.core.datastore.block.SegmentProperties;
 import org.apache.carbondata.core.features.TableOperation;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
 import org.apache.carbondata.core.metadata.CarbonTableIdentifier;
+import org.apache.carbondata.core.metadata.DatabaseLocationProvider;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.metadata.schema.BucketingInfo;
 import org.apache.carbondata.core.metadata.schema.PartitionInfo;
@@ -228,8 +229,8 @@ public class CarbonTable implements Serializable, Writable {
    * Return table unique name
    */
   public static String buildUniqueName(String databaseName, String tableName) {
-    return (databaseName + CarbonCommonConstants.UNDERSCORE + tableName).toLowerCase(
-        Locale.getDefault());
+    return (DatabaseLocationProvider.get().provide(databaseName) +
+        CarbonCommonConstants.UNDERSCORE + tableName).toLowerCase(Locale.getDefault());
   }
 
   /**
@@ -1129,6 +1130,10 @@ public class CarbonTable implements Serializable, Writable {
     }
   }
 
+  public String getGlobalSortPartitions() {
+    return tableInfo.getFactTable().getTableProperties().get("global_sort_partitions");
+  }
+
   @Override
   public void write(DataOutput out) throws IOException {
     tableInfo.write(out);
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTableBuilder.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTableBuilder.java
index e1d2162..a635033 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTableBuilder.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/CarbonTableBuilder.java
@@ -67,7 +67,7 @@ public class CarbonTableBuilder {
 
     TableInfo tableInfo = new TableInfo();
     tableInfo.setDatabaseName(databaseName);
-    tableInfo.setTableUniqueName(databaseName + "_" + tableName);
+    tableInfo.setTableUniqueName(CarbonTable.buildUniqueName(databaseName, tableName));
     tableInfo.setFactTable(tableSchema);
     tableInfo.setTablePath(tablePath);
     tableInfo.setTransactionalTable(isTransactionalTable);
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DataMapSchemaFactory.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DataMapSchemaFactory.java
index d7abc4b..a821bb3 100644
--- a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DataMapSchemaFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DataMapSchemaFactory.java
@@ -17,6 +17,9 @@
 
 package org.apache.carbondata.core.metadata.schema.table;
 
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.util.CarbonProperties;
+
 public class DataMapSchemaFactory {
   public static final DataMapSchemaFactory INSTANCE = new DataMapSchemaFactory();
 
@@ -29,4 +32,16 @@ public class DataMapSchemaFactory {
   public DataMapSchema getDataMapSchema(String dataMapName, String providerName) {
     return new DataMapSchema(dataMapName, providerName);
   }
+
+  public static DataMapSchemaStorageProvider getDataMapSchemaStorageProvider() {
+    String provider = CarbonProperties.getDataMapStorageProvider();
+    switch (provider) {
+      case CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE_DATABASE:
+        return new DatabaseDMSchemaStorageProvider();
+      case CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE_DISK:
+      default:
+        return new DiskBasedDMSchemaStorageProvider(
+            CarbonProperties.getInstance().getSystemFolderLocation());
+    }
+  }
 }
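
Selecting the DATABASE provider is then a one-line property change (DISK
remains the default), for example:

    import org.apache.carbondata.core.util.CarbonProperties

    // Store datamap schemas/status via the DATABASE provider instead of DISK.
    CarbonProperties.getInstance()
      .addProperty("carbon.datamap.schema.storage", "DATABASE")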
diff --git a/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DatabaseDMSchemaStorageProvider.java b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DatabaseDMSchemaStorageProvider.java
new file mode 100644
index 0000000..6a2eeec
--- /dev/null
+++ b/core/src/main/java/org/apache/carbondata/core/metadata/schema/table/DatabaseDMSchemaStorageProvider.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.metadata.schema.table;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.carbondata.common.exceptions.sql.NoSuchDataMapException;
+
+/**
+ * Stores datamap schema in database
+ */
+public class DatabaseDMSchemaStorageProvider implements DataMapSchemaStorageProvider {
+
+  public DatabaseDMSchemaStorageProvider() {
+  }
+
+  @Override
+  public void saveSchema(DataMapSchema dataMapSchema) {
+    throw new UnsupportedOperationException("not support saving DataMap schema into database");
+  }
+
+  @Override
+  public DataMapSchema retrieveSchema(String dataMapName)
+      throws NoSuchDataMapException {
+    throw new NoSuchDataMapException(dataMapName);
+  }
+
+  @Override
+  public List<DataMapSchema> retrieveSchemas(CarbonTable carbonTable) {
+    return new ArrayList<>(0);
+  }
+
+  @Override
+  public List<DataMapSchema> retrieveAllSchemas() throws IOException {
+    return new ArrayList<>(0);
+  }
+
+  @Override
+  public void dropSchema(String dataMapName) {
+    throw new UnsupportedOperationException("not support dropping DataMap schema from database");
+  }
+}
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/ExpressionResult.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/ExpressionResult.java
index 08ea041..f091ec5 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/ExpressionResult.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/ExpressionResult.java
@@ -389,6 +389,57 @@ public class ExpressionResult implements Comparable<ExpressionResult> {
 
   }
 
+  public Long getTimeAsMillisecond() throws FilterIllegalMemberException {
+    if (value == null) {
+      return null;
+    }
+    try {
+      DataType dataType = this.getDataType();
+      if (dataType == DataTypes.STRING) {
+        // Currently the query engine layer only supports the yyyy-MM-dd HH:mm:ss date
+        // format, regardless of the format the data has been stored in. So, while
+        // retrieving the direct surrogate value for a filter member, the value must
+        // first be parsed as a date in the above format and then converted to a timestamp.
+        SimpleDateFormat parser =
+            new SimpleDateFormat(CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT);
+        Date dateToStr;
+        try {
+          dateToStr = parser.parse(value.toString());
+          return dateToStr.getTime();
+        } catch (ParseException e) {
+          throw new FilterIllegalMemberException(
+              "Cannot convert" + this.getDataType().getName() + " to Time type value");
+        }
+      } else if (dataType == DataTypes.SHORT) {
+        return ((Short) value).longValue();
+      } else if (dataType == DataTypes.INT || dataType == DataTypes.LONG) {
+        return (Long) value;
+      } else if (dataType == DataTypes.DOUBLE) {
+        return (Long) value;
+      } else if (dataType == DataTypes.DATE) {
+        if (value instanceof java.sql.Date) {
+          return ((Date) value).getTime();
+        } else {
+          return (Long) value;
+        }
+      } else if (dataType == DataTypes.TIMESTAMP) {
+        if (value instanceof Date) {
+          return ((Date) value).getTime();
+        } else if (value instanceof Timestamp) {
+          return ((Timestamp) value).getTime();
+        } else {
+          return (Long) value;
+        }
+      } else {
+        throw new FilterIllegalMemberException(
+            "Cannot convert " + this.getDataType().getName() + " to Time type value");
+      }
+    } catch (ClassCastException e) {
+      throw new FilterIllegalMemberException(
+          "Cannot convert " + this.getDataType().getName() + " to Time type value");
+    }
+  }
+
   public Boolean getBoolean() throws FilterIllegalMemberException {
     if (value == null) {
       return null;
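
For STRING values, getTimeAsMillisecond() falls back to parsing with the
default timestamp format. A minimal sketch of that conversion, assuming
CARBON_TIMESTAMP_DEFAULT_FORMAT is "yyyy-MM-dd HH:mm:ss":

import java.text.ParseException;
import java.text.SimpleDateFormat;

public final class TimestampParsing {
  public static void main(String[] args) throws ParseException {
    // Same conversion the STRING branch performs before comparing timestamps.
    SimpleDateFormat parser = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
    long millis = parser.parse("2020-01-19 16:58:59").getTime();
    System.out.println(millis); // epoch milliseconds in the JVM's default time zone
  }
}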
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/EqualToExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/EqualToExpression.java
index 0588f2c..fb5e9e4 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/EqualToExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/EqualToExpression.java
@@ -78,8 +78,10 @@ public class EqualToExpression extends BinaryConditionalExpression {
       result = val1.getInt().equals(val2.getInt());
     } else if (dataType == DataTypes.DOUBLE) {
       result = FilterUtil.nanSafeEqualsDoubles(val1.getDouble(), val2.getDouble());
-    } else if (dataType == DataTypes.DATE || dataType == DataTypes.TIMESTAMP) {
+    } else if (dataType == DataTypes.DATE) {
       result = val1.getTime().equals(val2.getTime());
+    } else if (dataType == DataTypes.TIMESTAMP) {
+      result = val1.getTimeAsMillisecond().equals(val2.getTimeAsMillisecond());
     } else if (dataType == DataTypes.LONG) {
       result = val1.getLong().equals(val2.getLong());
     } else if (DataTypes.isDecimal(dataType)) {
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanEqualToExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanEqualToExpression.java
index c15ae62..64cd6c7 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanEqualToExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanEqualToExpression.java
@@ -60,8 +60,10 @@ public class GreaterThanEqualToExpression extends BinaryConditionalExpression {
       result = elRes.getInt() >= (erRes.getInt());
     } else if (dataType == DataTypes.DOUBLE) {
       result = elRes.getDouble() >= (erRes.getDouble());
-    } else if (dataType == DataTypes.DATE || dataType == DataTypes.TIMESTAMP) {
+    } else if (dataType == DataTypes.DATE) {
       result = elRes.getTime() >= (erRes.getTime());
+    } else if (dataType == DataTypes.TIMESTAMP) {
+      result = elRes.getTimeAsMillisecond() >= (erRes.getTimeAsMillisecond());
     } else if (dataType == DataTypes.LONG) {
       result = elRes.getLong() >= (erRes.getLong());
     } else if (DataTypes.isDecimal(dataType)) {
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanExpression.java
index 8cbb258..2ca56a5 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanExpression.java
@@ -62,8 +62,10 @@ public class GreaterThanExpression extends BinaryConditionalExpression {
       result = exprLeftRes.getShort() > (exprRightRes.getShort());
     } else if (dataType == DataTypes.INT) {
       result = exprLeftRes.getInt() > (exprRightRes.getInt());
-    } else if (dataType == DataTypes.DATE || dataType == DataTypes.TIMESTAMP) {
+    } else if (dataType == DataTypes.DATE) {
       result = exprLeftRes.getTime() > (exprRightRes.getTime());
+    } else if (dataType == DataTypes.TIMESTAMP) {
+      result = exprLeftRes.getTimeAsMillisecond() > (exprRightRes.getTimeAsMillisecond());
     } else if (dataType == DataTypes.LONG) {
       result = exprLeftRes.getLong() > (exprRightRes.getLong());
     } else if (DataTypes.isDecimal(dataType)) {
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/InExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/InExpression.java
index 11d538e..390ff28 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/InExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/InExpression.java
@@ -67,8 +67,10 @@ public class InExpression extends BinaryConditionalExpression {
           val = new ExpressionResult(val.getDataType(), expressionResVal.getDouble());
         } else if (dataType == DataTypes.LONG) {
           val = new ExpressionResult(val.getDataType(), expressionResVal.getLong());
-        } else if (dataType == DataTypes.DATE || dataType == DataTypes.TIMESTAMP) {
+        } else if (dataType == DataTypes.DATE) {
           val = new ExpressionResult(val.getDataType(), expressionResVal.getTime());
+        } else if (dataType == DataTypes.TIMESTAMP) {
+          val = new ExpressionResult(val.getDataType(), expressionResVal.getTimeAsMillisecond());
         } else if (DataTypes.isDecimal(dataType)) {
           val = new ExpressionResult(val.getDataType(), expressionResVal.getDecimal());
         } else {
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/LessThanEqualToExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/LessThanEqualToExpression.java
index 6fee008..e2b0512 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/LessThanEqualToExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/LessThanEqualToExpression.java
@@ -60,8 +60,10 @@ public class LessThanEqualToExpression extends BinaryConditionalExpression {
       result = elRes.getInt() <= (erRes.getInt());
     } else if (dataType == DataTypes.DOUBLE) {
       result = elRes.getDouble() <= (erRes.getDouble());
-    } else if (dataType == DataTypes.DATE || dataType == DataTypes.TIMESTAMP) {
+    } else if (dataType == DataTypes.DATE) {
       result = elRes.getTime() <= (erRes.getTime());
+    } else if (dataType == DataTypes.TIMESTAMP) {
+      result = elRes.getTimeAsMillisecond() <= (erRes.getTimeAsMillisecond());
     } else if (dataType == DataTypes.LONG) {
       result = elRes.getLong() <= (erRes.getLong());
     } else if (DataTypes.isDecimal(dataType)) {
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/LessThanExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/LessThanExpression.java
index 4e4c240..bee6dfe 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/LessThanExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/LessThanExpression.java
@@ -64,8 +64,10 @@ public class LessThanExpression extends BinaryConditionalExpression {
       result = elRes.getInt() < (erRes.getInt());
     } else if (dataType == DataTypes.DOUBLE) {
       result = elRes.getDouble() < (erRes.getDouble());
-    } else if (dataType == DataTypes.DATE || dataType == DataTypes.TIMESTAMP) {
+    } else if (dataType == DataTypes.DATE) {
       result = elRes.getTime() < (erRes.getTime());
+    } else if (dataType == DataTypes.TIMESTAMP) {
+      result = elRes.getTimeAsMillisecond() < (erRes.getTimeAsMillisecond());
     } else if (dataType == DataTypes.LONG) {
       result = elRes.getLong() < (erRes.getLong());
     } else if (DataTypes.isDecimal(dataType)) {
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotEqualsExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotEqualsExpression.java
index eae8019..69c7cc5 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotEqualsExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotEqualsExpression.java
@@ -74,8 +74,10 @@ public class NotEqualsExpression extends BinaryConditionalExpression {
       result = val1.getInt().intValue() != val2.getInt().intValue();
     } else if (dataType == DataTypes.DOUBLE) {
       result = val1.getDouble().doubleValue() != val2.getDouble().doubleValue();
-    } else if (dataType == DataTypes.DATE || dataType == DataTypes.TIMESTAMP) {
+    } else if (dataType == DataTypes.DATE) {
       result = val1.getTime().longValue() != val2.getTime().longValue();
+    } else if (dataType == DataTypes.TIMESTAMP) {
+      result = val1.getTimeAsMillisecond().longValue() != val2.getTimeAsMillisecond().longValue();
     } else if (dataType == DataTypes.LONG) {
       result = elRes.getLong().longValue() != (erRes.getLong()).longValue();
     } else if (DataTypes.isDecimal(dataType)) {
diff --git a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotInExpression.java b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotInExpression.java
index cbc2995..6bd09a5 100644
--- a/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotInExpression.java
+++ b/core/src/main/java/org/apache/carbondata/core/scan/expression/conditional/NotInExpression.java
@@ -86,8 +86,10 @@ public class NotInExpression extends BinaryConditionalExpression {
           val = new ExpressionResult(val.getDataType(), exprResVal.getInt());
         } else if (dataType == DataTypes.DOUBLE) {
           val = new ExpressionResult(val.getDataType(), exprResVal.getDouble());
-        } else if (dataType == DataTypes.DATE || dataType == DataTypes.TIMESTAMP) {
+        } else if (dataType == DataTypes.DATE) {
           val = new ExpressionResult(val.getDataType(), exprResVal.getTime());
+        } else if (dataType == DataTypes.TIMESTAMP) {
+          val = new ExpressionResult(val.getDataType(), exprResVal.getTimeAsMillisecond());
         } else if (dataType == DataTypes.LONG) {
           val = new ExpressionResult(val.getDataType(), exprResVal.getLong());
         } else if (DataTypes.isDecimal(dataType)) {
diff --git a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java
index 2b76db1..62eb8a2 100755
--- a/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java
+++ b/core/src/main/java/org/apache/carbondata/core/statusmanager/SegmentStatusManager.java
@@ -37,6 +37,7 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datamap.Segment;
 import org.apache.carbondata.core.datastore.filesystem.CarbonFile;
 import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.exception.ConcurrentOperationException;
 import org.apache.carbondata.core.fileoperations.AtomicFileOperationFactory;
 import org.apache.carbondata.core.fileoperations.AtomicFileOperations;
 import org.apache.carbondata.core.fileoperations.FileWriteOperation;
@@ -1100,6 +1101,58 @@ public class SegmentStatusManager {
     }
   }
 
+  public static void truncateTable(CarbonTable carbonTable)
+      throws ConcurrentOperationException, IOException {
+    ICarbonLock carbonTableStatusLock = CarbonLockFactory.getCarbonLockObj(
+        carbonTable.getAbsoluteTableIdentifier(), LockUsage.TABLE_STATUS_LOCK);
+    boolean locked = false;
+    try {
+      // update the table status file to mark all segments for deletion
+      locked = carbonTableStatusLock.lockWithRetries();
+      if (locked) {
+        LOG.info("Table status lock has been successfully acquired.");
+        LoadMetadataDetails[] listOfLoadFolderDetailsArray =
+            SegmentStatusManager.readLoadMetadata(
+                CarbonTablePath.getMetadataPath(carbonTable.getTablePath()));
+        for (LoadMetadataDetails listOfLoadFolderDetails : listOfLoadFolderDetailsArray) {
+          boolean writing;
+          switch (listOfLoadFolderDetails.getSegmentStatus()) {
+            case INSERT_IN_PROGRESS:
+            case INSERT_OVERWRITE_IN_PROGRESS:
+            case STREAMING:
+              writing = true;
+              break;
+            default:
+              writing = false;
+          }
+          if (writing) {
+            throw new ConcurrentOperationException(carbonTable, "insert", "truncate");
+          }
+        }
+        for (LoadMetadataDetails listOfLoadFolderDetails : listOfLoadFolderDetailsArray) {
+          listOfLoadFolderDetails.setSegmentStatus(SegmentStatus.MARKED_FOR_DELETE);
+        }
+        SegmentStatusManager
+            .writeLoadDetailsIntoFile(
+                CarbonTablePath.getTableStatusFilePath(carbonTable.getTablePath()),
+                listOfLoadFolderDetailsArray);
+      } else {
+        String dbName = carbonTable.getDatabaseName();
+        String tableName = carbonTable.getTableName();
+        String errorMsg = "truncate table request failed for " +
+            dbName + "." + tableName +
+            ". Unable to acquire the table status lock because another operation " +
+            "is running in the background.";
+        LOG.error(errorMsg);
+        throw new IOException(errorMsg + " Please try again later.");
+      }
+    } finally {
+      if (locked) {
+        CarbonLockUtil.fileUnlock(carbonTableStatusLock, LockUsage.TABLE_STATUS_LOCK);
+      }
+    }
+  }
+
   /**
    * Get the number of invisible segment info from segment info list.
    */
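
A hedged caller-side sketch for the new truncate path (carbonTable is assumed
to be an already resolved CarbonTable instance; ConcurrentOperationException is
assumed to be a checked exception):

import java.io.IOException;

import org.apache.carbondata.core.exception.ConcurrentOperationException;
import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
import org.apache.carbondata.core.statusmanager.SegmentStatusManager;

public final class TruncateCaller {
  static void truncate(CarbonTable carbonTable) throws IOException {
    try {
      // Marks every segment MARKED_FOR_DELETE under the table status lock;
      // rejected if an insert or streaming load is still in progress.
      SegmentStatusManager.truncateTable(carbonTable);
    } catch (ConcurrentOperationException e) {
      throw new IOException("truncate rejected, retry after the load finishes", e);
    }
  }
}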
diff --git a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
index 8e5e9f8..05754f2 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/CarbonProperties.java
@@ -205,6 +205,9 @@ public final class CarbonProperties {
       case CarbonCommonConstants.CARBON_LOCAL_DICTIONARY_SIZE_THRESHOLD_IN_MB:
         validateAndGetLocalDictionarySizeThresholdInMB();
         break;
+      case CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE:
+        validateDMSchemaStorageProvider();
+        break;
       // TODO : Validation for carbon.lock.type should be handled for addProperty flow
       default:
         // none
@@ -397,6 +400,8 @@ public final class CarbonProperties {
       // CARBON_LOCK_TYPE_LOCAL and for the distributed one CARBON_LOCK_TYPE_HDFS
       case CarbonCommonConstants.CARBON_LOCK_TYPE_ZOOKEEPER:
         break;
+      case CarbonCommonConstants.CARBON_LOCK_TYPE_CUSTOM:
+        break;
       case CarbonCommonConstants.CARBON_LOCK_TYPE_LOCAL:
       case CarbonCommonConstants.CARBON_LOCK_TYPE_HDFS:
       default:
@@ -1663,6 +1668,31 @@ public final class CarbonProperties {
     }
   }
 
+  private void validateDMSchemaStorageProvider() {
+    String provider =
+        carbonProperties.getProperty(CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE);
+    if (provider == null) {
+      carbonProperties.setProperty(
+          CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE,
+          CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE_DEFAULT);
+    } else {
+      switch (provider.toUpperCase()) {
+        case CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE_DISK:
+          break;
+        case CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE_DATABASE:
+          break;
+        default:
+          LOGGER.warn("The value \"" + provider + "\" configured for key "
+              + CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE
+              + " is invalid for current file system. Use the default value "
+              + CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE_DEFAULT + " instead.");
+          carbonProperties.setProperty(
+              CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE,
+              CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE_DEFAULT);
+      }
+    }
+  }
+
   /**
    * Check whether the Distributed Pruning is enabled by the user or not.
    */
@@ -1856,4 +1886,12 @@ public final class CarbonProperties {
     }
   }
 
+  public static String getDataMapStorageProvider() {
+    String provider = CarbonProperties.getInstance()
+        .getProperty(CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE);
+    if (provider == null) {
+      return CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE_DEFAULT;
+    }
+    return provider.toUpperCase();
+  }
 }
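
Note that getDataMapStorageProvider() upper-cases whatever is configured, so
values are effectively case-insensitive on read. A small sketch (the printed
string is whatever was configured, normalized to upper case):

import org.apache.carbondata.core.constants.CarbonCommonConstants;
import org.apache.carbondata.core.util.CarbonProperties;

public final class StorageProviderLookup {
  public static void main(String[] args) {
    // lower-case input is accepted; the getter upper-cases it before use
    CarbonProperties.getInstance().addProperty(
        CarbonCommonConstants.CARBON_DATAMAP_SCHEMA_STORAGE, "database");
    System.out.println(CarbonProperties.getDataMapStorageProvider());
  }
}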
diff --git a/core/src/main/java/org/apache/carbondata/core/util/ThreadLocalSessionInfo.java b/core/src/main/java/org/apache/carbondata/core/util/ThreadLocalSessionInfo.java
index f85a350..6612a44 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/ThreadLocalSessionInfo.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/ThreadLocalSessionInfo.java
@@ -51,4 +51,18 @@ public class ThreadLocalSessionInfo {
   public static void unsetAll() {
     threadLocal.remove();
   }
+
+  /**
+   * checks whether the current thread carries the CarbonSessionInfo and the
+   * hadoop configuration, failing fast if either is missing
+   */
+  public static void shouldHaveCarbonConf() {
+    CarbonSessionInfo sessionInfo = getCarbonSessionInfo();
+    if (sessionInfo == null) {
+      throw new RuntimeException("the thread not contain CarbonSessionInfo");
+    }
+    Object conf = sessionInfo.getNonSerializableExtraInfo().get("carbonConf");
+    if (conf == null) {
+      throw new RuntimeException("the thread not contain hadoop configuration");
+    }
+  }
 }
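
A sketch of using the new guard at the start of thread-local work, assuming the
session info and its "carbonConf" entry were attached by the driver beforehand:

import org.apache.carbondata.core.util.ThreadLocalSessionInfo;

public final class ConfGuard {
  static void runWithConfCheck(Runnable work) {
    // Fails fast with a RuntimeException when the CarbonSessionInfo or the
    // Hadoop configuration was not propagated to this thread.
    ThreadLocalSessionInfo.shouldHaveCarbonConf();
    work.run();
  }
}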
diff --git a/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java b/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
index 0be3642..ea7542e 100644
--- a/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
+++ b/core/src/main/java/org/apache/carbondata/core/util/path/CarbonTablePath.java
@@ -552,7 +552,7 @@ public class CarbonTablePath {
   public static String getCarbonDataFileName(String carbonDataFilePath) {
     return carbonDataFilePath.substring(
         carbonDataFilePath.lastIndexOf(CarbonCommonConstants.FILE_SEPARATOR) + 1,
-        carbonDataFilePath.indexOf(CARBON_DATA_EXT));
+        carbonDataFilePath.lastIndexOf(CARBON_DATA_EXT));
   }
 
   /**
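
The switch from indexOf to lastIndexOf matters when the extension string occurs
earlier in the path, e.g. inside a directory name. A hypothetical illustration
(the path is made up; CARBON_DATA_EXT is assumed to be ".carbondata"):

public final class FileNameExtraction {
  public static void main(String[] args) {
    String path = "/store/db.carbondata/t1/part-0-0-123.carbondata";
    String ext = ".carbondata";
    // indexOf(ext) would match inside the directory name "db.carbondata",
    // making the end index smaller than the start index and throwing
    // StringIndexOutOfBoundsException; lastIndexOf matches the real extension.
    String name = path.substring(path.lastIndexOf('/') + 1, path.lastIndexOf(ext));
    System.out.println(name); // part-0-0-123
  }
}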
diff --git a/core/src/test/java/org/apache/carbondata/core/carbon/CarbonTableIdentifierTest.java b/core/src/test/java/org/apache/carbondata/core/carbon/CarbonTableIdentifierTest.java
index b7707af..707428a 100644
--- a/core/src/test/java/org/apache/carbondata/core/carbon/CarbonTableIdentifierTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/carbon/CarbonTableIdentifierTest.java
@@ -93,6 +93,6 @@ public class CarbonTableIdentifierTest {
   @Test public void toStringTest() {
     String res = carbonTableIdentifier.toString();
     System.out.printf("sfdsdf " + res);
-    Assert.assertTrue(res.equals("DatabseName_tableName"));
+    Assert.assertTrue(res.equalsIgnoreCase("DatabseName_tableName"));
   }
 }
diff --git a/core/src/test/java/org/apache/carbondata/core/locks/CarbonLockFactoryTest.java b/core/src/test/java/org/apache/carbondata/core/locks/CarbonLockFactoryTest.java
new file mode 100644
index 0000000..f6c61010
--- /dev/null
+++ b/core/src/test/java/org/apache/carbondata/core/locks/CarbonLockFactoryTest.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.locks;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.locks.ReentrantLock;
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.util.CarbonProperties;
+import org.junit.Assert;
+import org.junit.Test;
+
+public class CarbonLockFactoryTest {
+
+  @Test
+  public void testCustomLock() {
+    CarbonProperties.getInstance().addProperty(
+        CarbonCommonConstants.LOCK_TYPE,
+        CarbonCommonConstants.CARBON_LOCK_TYPE_CUSTOM
+    );
+    CarbonProperties.getInstance().addProperty(
+        CarbonCommonConstants.LOCK_CLASS,
+        CustomLock.class.getName()
+    );
+    final List<Thread> threadList = new ArrayList<>(2);
+    for (int index = 0; index < 2; index++) {
+      threadList.add(new Thread() {
+        @Override
+        public void run() {
+          ICarbonLock lock = CarbonLockFactory.getSystemLevelCarbonLockObj("root", "test");
+          // hold the lock for a while so that the other thread must wait.
+          try {
+            Thread.sleep(1500L);
+          } catch (InterruptedException ignore) {
+            // ignore.
+          }
+          lock.unlock();
+        }
+      });
+    }
+    final long startTime = System.nanoTime();
+    for (Thread thread : threadList) {
+      thread.start();
+    }
+    for (Thread thread : threadList) {
+      try {
+        thread.join();
+      } catch (InterruptedException ignore) {
+        // ignore.
+      }
+    }
+    Assert.assertTrue(
+        TimeUnit.MILLISECONDS.convert(System.nanoTime() - startTime, TimeUnit.NANOSECONDS) > 3000L
+    );
+  }
+
+  public static final class CustomLock implements ICarbonLock {
+
+    private static final ConcurrentHashMap<String, ReentrantLock> LOCK_MAP =
+        new ConcurrentHashMap<>();
+
+    private static ReentrantLock getLock(final String lockIdentifier) {
+      // atomically create the lock on first use for this identifier
+      return LOCK_MAP.computeIfAbsent(lockIdentifier, identifier -> new ReentrantLock());
+    }
+
+    private final String lockIdentifier;
+
+    public CustomLock(final String absoluteLockPath, final String lockFile) {
+      this.lockIdentifier = absoluteLockPath + lockFile;
+      getLock(this.lockIdentifier).lock();
+    }
+
+    @Override
+    public boolean unlock() {
+      getLock(this.lockIdentifier).unlock();
+      return true;
+    }
+
+    @Override
+    public boolean lockWithRetries() {
+      // TODO
+      throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public boolean lockWithRetries(int retryCount, int retryInterval) {
+      // TODO
+      throw new UnsupportedOperationException();
+    }
+
+    @Override
+    public boolean releaseLockManually(String lockFile) {
+      getLock(lockFile).unlock();
+      return true;
+    }
+
+    @Override
+    public String getLockFilePath() {
+      return this.lockIdentifier;
+    }
+
+  }
+
+}
diff --git a/core/src/test/java/org/apache/carbondata/core/metadata/DatabaseLocationProviderTest.java b/core/src/test/java/org/apache/carbondata/core/metadata/DatabaseLocationProviderTest.java
new file mode 100644
index 0000000..98aa038
--- /dev/null
+++ b/core/src/test/java/org/apache/carbondata/core/metadata/DatabaseLocationProviderTest.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.core.metadata;
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants;
+import org.apache.carbondata.core.util.CarbonProperties;
+
+import org.junit.Assert;
+import org.junit.Ignore;
+import org.junit.Test;
+
+public class DatabaseLocationProviderTest {
+
+  @Ignore
+  @Test
+  public void testCustomProvider() {
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.DATABASE_LOCATION_PROVIDER,
+        "org.apache.carbondata.core.metadata.DatabaseLocationProviderTest$TestProvider");
+    Assert.assertEquals("test", DatabaseLocationProvider.get().provide("databaseName"));
+    CarbonProperties.getInstance().removeProperty(CarbonCommonConstants.DATABASE_LOCATION_PROVIDER);
+  }
+
+  public static final class TestProvider extends DatabaseLocationProvider {
+
+    @Override
+    public String provide(String originalDatabaseName) {
+      return "test";
+    }
+
+  }
+
+}
diff --git a/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/EqualToExpressionUnitTest.java b/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/EqualToExpressionUnitTest.java
index 836cfeb..f2e82ad 100644
--- a/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/EqualToExpressionUnitTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/EqualToExpressionUnitTest.java
@@ -171,7 +171,7 @@ public class EqualToExpressionUnitTest {
       value.setValues(objectRow);
 
       new MockUp<ExpressionResult>() {
-        @Mock public Long getTime() {
+        @Mock public Long getTimeAsMillisecond() {
           return 18465213000000L;
         }
       };
diff --git a/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanEqualToExpressionUnitTest.java b/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanEqualToExpressionUnitTest.java
index 7750a88..ef97965 100644
--- a/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanEqualToExpressionUnitTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanEqualToExpressionUnitTest.java
@@ -289,7 +289,7 @@ public class GreaterThanEqualToExpressionUnitTest {
       value.setValues(objectRow);
 
       new MockUp<ExpressionResult>() {
-        @Mock public Long getTime() {
+        @Mock public Long getTimeAsMillisecond() {
           return 18465213000000L;
         }
       };
@@ -320,7 +320,7 @@ public class GreaterThanEqualToExpressionUnitTest {
 
       new MockUp<ExpressionResult>() {
         boolean isFirst = true;
-        @Mock public Long getTime() {
+        @Mock public Long getTimeAsMillisecond() {
           if (isFirst) {
             isFirst = false;
             return 1190505600L;
diff --git a/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanExpressionUnitTest.java b/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanExpressionUnitTest.java
index c1bfe50..625b7b2 100644
--- a/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanExpressionUnitTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/GreaterThanExpressionUnitTest.java
@@ -235,7 +235,7 @@ public class GreaterThanExpressionUnitTest {
       new MockUp<ExpressionResult>() {
         Boolean returnMockFlag = true;
 
-        @Mock public Long getTime() {
+        @Mock public Long getTimeAsMillisecond() {
           if (returnMockFlag) {
             returnMockFlag = false;
             return 1190592000L;
@@ -280,7 +280,7 @@ public class GreaterThanExpressionUnitTest {
       new MockUp<ExpressionResult>() {
         Boolean returnMockFlag = true;
 
-        @Mock public Long getTime() {
+        @Mock public Long getTimeAsMillisecond() {
           if (returnMockFlag) {
             returnMockFlag = false;
             return 1190505600L;
diff --git a/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/LessThanEqualToExpressionUnitTest.java b/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/LessThanEqualToExpressionUnitTest.java
index a9c7757..b48fc6e 100644
--- a/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/LessThanEqualToExpressionUnitTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/LessThanEqualToExpressionUnitTest.java
@@ -238,7 +238,7 @@ public class LessThanEqualToExpressionUnitTest {
       new MockUp<ExpressionResult>() {
         Boolean returnMockFlag = true;
 
-        @Mock public Long getTime() {
+        @Mock public Long getTimeAsMillisecond() {
           if (returnMockFlag) {
             returnMockFlag = false;
             return 1190505600L;
@@ -283,7 +283,7 @@ public class LessThanEqualToExpressionUnitTest {
       new MockUp<ExpressionResult>() {
         Boolean returnMockFlag = true;
 
-        @Mock public Long getTime() {
+        @Mock public Long getTimeAsMillisecond() {
           if (returnMockFlag) {
             returnMockFlag = false;
             return 1190592000L;
diff --git a/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/LessThanExpressionUnitTest.java b/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/LessThanExpressionUnitTest.java
index d346f57..8206a6b 100644
--- a/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/LessThanExpressionUnitTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/LessThanExpressionUnitTest.java
@@ -266,7 +266,7 @@ public class LessThanExpressionUnitTest {
       new MockUp<ExpressionResult>() {
         Boolean returnMockFlag = true;
 
-        @Mock public Long getTime() {
+        @Mock public Long getTimeAsMillisecond() {
           if (returnMockFlag) {
             returnMockFlag = false;
             return 1190505600L;
@@ -311,7 +311,7 @@ public class LessThanExpressionUnitTest {
       new MockUp<ExpressionResult>() {
         Boolean returnMockFlag = true;
 
-        @Mock public Long getTime() {
+        @Mock public Long getTimeAsMillisecond() {
           if (returnMockFlag) {
             returnMockFlag = false;
             return 1190592000L;
diff --git a/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/NotEqualsExpressionUnitTest.java b/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/NotEqualsExpressionUnitTest.java
index 998c680..ece6d16 100644
--- a/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/NotEqualsExpressionUnitTest.java
+++ b/core/src/test/java/org/apache/carbondata/core/scan/expression/conditional/NotEqualsExpressionUnitTest.java
@@ -304,7 +304,7 @@ public class NotEqualsExpressionUnitTest {
       new MockUp<ExpressionResult>() {
         Boolean returnMockFlag = true;
 
-        @Mock public Long getTime() {
+        @Mock public Long getTimeAsMillisecond() {
           if (returnMockFlag) {
             returnMockFlag = false;
             return 1190592000L;
@@ -349,7 +349,7 @@ public class NotEqualsExpressionUnitTest {
       new MockUp<ExpressionResult>() {
         Boolean returnMockFlag = true;
 
-        @Mock public Long getTime() {
+        @Mock public Long getTimeAsMillisecond() {
           return 1190592000L;
         }
       };
diff --git a/datamap/examples/src/minmaxdatamap/test/scala/org/apache/carbondata/datamap/examples/MinMaxDataMapSuite.scala b/datamap/examples/src/minmaxdatamap/test/scala/org/apache/carbondata/datamap/examples/MinMaxDataMapSuite.scala
index 8436e07..53870f9 100644
--- a/datamap/examples/src/minmaxdatamap/test/scala/org/apache/carbondata/datamap/examples/MinMaxDataMapSuite.scala
+++ b/datamap/examples/src/minmaxdatamap/test/scala/org/apache/carbondata/datamap/examples/MinMaxDataMapSuite.scala
@@ -43,13 +43,13 @@ class MinMaxDataMapSuite extends QueryTest with BeforeAndAfterAll {
       s"""
          | CREATE TABLE $normalTable(id INT, name STRING, city STRING, age INT,
          | s1 STRING, s2 STRING, s3 STRING, s4 STRING, s5 STRING, s6 STRING, s7 STRING, s8 STRING)
-         | STORED BY 'carbondata' TBLPROPERTIES('table_blocksize'='128')
+         | STORED AS carbondata TBLPROPERTIES('table_blocksize'='128')
          |  """.stripMargin)
     sql(
       s"""
         | CREATE TABLE $minMaxDMSampleTable(id INT, name STRING, city STRING, age INT,
         | s1 STRING, s2 STRING, s3 STRING, s4 STRING, s5 STRING, s6 STRING, s7 STRING, s8 STRING)
-        | STORED BY 'carbondata' TBLPROPERTIES('table_blocksize'='128')
+        | STORED AS carbondata TBLPROPERTIES('table_blocksize'='128')
         |  """.stripMargin)
     sql(
       s"""
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
index be292be..cf76e48 100644
--- a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
+++ b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVAnalyzerRule.scala
@@ -83,23 +83,27 @@ class MVAnalyzerRule(sparkSession: SparkSession) extends Rule[LogicalPlan] {
         }
         Aggregate(grp, aExp, child)
     }
-    var catalog = DataMapStoreManager.getInstance().getDataMapCatalog(dataMapProvider,
-      DataMapClassProvider.MV.getShortName).asInstanceOf[SummaryDatasetCatalog]
-    // when first time DataMapCatalogs are initialized, it stores session info also, but when carbon
-    // session is newly created, catalog map will not be cleared, so if session info is different,
-    // remove the entry from map.
-    if (catalog != null && !catalog.mvSession.sparkSession.equals(sparkSession)) {
-      DataMapStoreManager.getInstance().clearDataMapCatalog()
-      catalog = DataMapStoreManager.getInstance().getDataMapCatalog(dataMapProvider,
+    if (needAnalysis) {
+      var catalog = DataMapStoreManager.getInstance().getDataMapCatalog(dataMapProvider,
         DataMapClassProvider.MV.getShortName).asInstanceOf[SummaryDatasetCatalog]
-    }
-    if (needAnalysis && catalog != null && isValidPlan(plan, catalog)) {
-      val modularPlan = catalog.mvSession.sessionState.rewritePlan(plan).withMVTable
-      if (modularPlan.find(_.rewritten).isDefined) {
-        var compactSQL = modularPlan.asCompactSQL
-        compactSQL = reWriteTheUDFInSQLWithQualifierName(modularPlan, compactSQL)
-        val analyzed = sparkSession.sql(compactSQL).queryExecution.analyzed
-        analyzed
+      // when DataMapCatalogs are initialized for the first time, the session info is
+      // stored as well; but when a new carbon session is created, the catalog map is
+      // not cleared, so if the session info differs, remove the stale entry from the map.
+      if (catalog != null && !catalog.mvSession.sparkSession.equals(sparkSession)) {
+        DataMapStoreManager.getInstance().clearDataMapCatalog()
+        catalog = DataMapStoreManager.getInstance().getDataMapCatalog(dataMapProvider,
+          DataMapClassProvider.MV.getShortName).asInstanceOf[SummaryDatasetCatalog]
+      }
+      if (catalog != null && isValidPlan(plan, catalog)) {
+        val modularPlan = catalog.mvSession.sessionState.rewritePlan(plan).withMVTable
+        if (modularPlan.find(_.rewritten).isDefined) {
+          var compactSQL = modularPlan.asCompactSQL
+          compactSQL = reWriteTheUDFInSQLWithQualifierName(modularPlan, compactSQL)
+          val analyzed = sparkSession.sql(compactSQL).queryExecution.analyzed
+          analyzed
+        } else {
+          plan
+        }
       } else {
         plan
       }
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVDataMapProvider.scala b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVDataMapProvider.scala
index 2550edc..d4720a4 100644
--- a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVDataMapProvider.scala
+++ b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVDataMapProvider.scala
@@ -20,13 +20,10 @@ import java.io.IOException
 
 import scala.collection.JavaConverters._
 
-import org.apache.spark.sql.{CarbonEnv, CarbonUtils, SparkSession}
-import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.catalyst.plans.logical.SubqueryAlias
+import org.apache.spark.sql.{CarbonUtils, SparkSession}
 import org.apache.spark.sql.execution.command.management.CarbonLoadDataCommand
 import org.apache.spark.sql.execution.command.table.CarbonDropTableCommand
-import org.apache.spark.sql.execution.datasources.FindDataSourceTable
-import org.apache.spark.sql.parser.CarbonSpark2SqlParser
+import org.apache.spark.sql.parser.CarbonSparkSqlParserUtil
 import org.apache.spark.sql.util.SparkSQLUtil
 
 import org.apache.carbondata.common.annotations.InterfaceAudience
@@ -121,10 +118,7 @@ class MVDataMapProvider(
     val ctasQuery = dataMapSchema.getCtasQuery
     if (ctasQuery != null) {
       val identifier = dataMapSchema.getRelationIdentifier
-      val updatedQuery = new CarbonSpark2SqlParser().addMVSkipFunction(ctasQuery)
-      val queryPlan = SparkSQLUtil.execute(
-        sparkSession.sql(updatedQuery).queryExecution.analyzed,
-        sparkSession).drop("mv")
+      val updatedQuery = CarbonSparkSqlParserUtil.getMVQuery(ctasQuery, sparkSession)
       var isOverwriteTable = false
       val isFullRefresh =
         if (null != dataMapSchema.getProperties.get("full_refresh")) {
@@ -154,7 +148,7 @@ class MVDataMapProvider(
         options = scala.collection.immutable.Map("fileheader" -> header),
         isOverwriteTable,
         inputSqlString = null,
-        dataFrame = Some(queryPlan),
+        dataFrame = Some(updatedQuery),
         updateModel = None,
         tableInfoOp = None,
         internalOptions = Map("mergedSegmentName" -> newLoadName,
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVHelper.scala b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVHelper.scala
index b32367b..adff89d 100644
--- a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVHelper.scala
+++ b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/datamap/MVHelper.scala
@@ -23,7 +23,7 @@ import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
 
 import org.apache.spark.sql.{CarbonEnv, CarbonToSparkAdapter, SparkSession}
-import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.{CarbonParserUtil, TableIdentifier}
 import org.apache.spark.sql.catalyst.catalog.CatalogTable
 import org.apache.spark.sql.catalyst.expressions.{Alias, Attribute, AttributeReference, Cast, Coalesce, Expression, Literal, NamedExpression, ScalaUDF, SortOrder}
 import org.apache.spark.sql.catalyst.expressions.aggregate._
@@ -32,7 +32,7 @@ import org.apache.spark.sql.execution.command.{Field, PartitionerField, TableMod
 import org.apache.spark.sql.execution.command.table.{CarbonCreateTableCommand, CarbonDropTableCommand}
 import org.apache.spark.sql.execution.command.timeseries.{TimeSeriesFunction, TimeSeriesUtil}
 import org.apache.spark.sql.execution.datasources.LogicalRelation
-import org.apache.spark.sql.parser.CarbonSpark2SqlParser
+import org.apache.spark.sql.parser.CarbonSparkSqlParserUtil
 import org.apache.spark.sql.types.{ArrayType, DateType, MapType, StructType}
 import org.apache.spark.util.{DataMapUtil, PartitionUtils}
 
@@ -68,9 +68,8 @@ object MVHelper {
     }
     val mvUtil = new MVUtil
     mvUtil.validateDMProperty(dmProperties)
-    val updatedQuery = new CarbonSpark2SqlParser().addMVSkipFunction(queryString)
-    val query = sparkSession.sql(updatedQuery)
-    val logicalPlan = MVHelper.dropDummFuc(query.queryExecution.analyzed)
+    val logicalPlan = MVHelper.dropDummFuc(
+      CarbonSparkSqlParserUtil.getMVPlan(queryString, sparkSession))
     // if there is limit in MV ctas query string, throw exception, as its not a valid usecase
     logicalPlan match {
       case Limit(_, _) =>
@@ -233,9 +232,9 @@ object MVHelper {
     TableIdentifier(dataMapSchema.getDataMapName + "_table",
       selectTables.head.identifier.database)
     // prepare table model of the collected tokens
-    val tableModel: TableModel = new CarbonSpark2SqlParser().prepareTableModel(
+    val tableModel: TableModel = CarbonParserUtil.prepareTableModel(
       ifNotExistPresent = ifNotExistsSet,
-      new CarbonSpark2SqlParser().convertDbNameToLowerCase(tableIdentifier.database),
+      CarbonParserUtil.convertDbNameToLowerCase(tableIdentifier.database),
       tableIdentifier.table.toLowerCase,
       fields,
       partitionerFields,
diff --git a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/SummaryDatasetCatalog.scala b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/SummaryDatasetCatalog.scala
index 19e42a6..873c1f7 100644
--- a/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/SummaryDatasetCatalog.scala
+++ b/datamap/mv/core/src/main/scala/org/apache/carbondata/mv/rewrite/SummaryDatasetCatalog.scala
@@ -24,7 +24,7 @@ import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.expressions.Attribute
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.execution.datasources.FindDataSourceTable
-import org.apache.spark.sql.parser.CarbonSpark2SqlParser
+import org.apache.spark.sql.parser.{CarbonSpark2SqlParser, CarbonSparkSqlParserUtil}
 import org.apache.spark.sql.util.SparkSQLUtil
 
 import org.apache.carbondata.core.datamap.DataMapCatalog
@@ -107,7 +107,6 @@ private[mv] class SummaryDatasetCatalog(sparkSession: SparkSession)
    */
   private[mv] def registerSchema(dataMapSchema: DataMapSchema): Unit = {
     writeLock {
-      val updatedQuery = parser.addMVSkipFunction(dataMapSchema.getCtasQuery)
       val currentDatabase = sparkSession match {
         case carbonSession: CarbonSession =>
           carbonSession.sessionState.catalog.getCurrentDatabase
@@ -118,13 +117,13 @@ private[mv] class SummaryDatasetCatalog(sparkSession: SparkSession)
       // catalog, if the datamap is in database other than sparkSession.currentDataBase(), then it
       // fails to register, so set the database present in the dataMapSchema Object
       setCurrentDataBase(dataMapSchema.getRelationIdentifier.getDatabaseName)
-      val query = sparkSession.sql(updatedQuery)
+      val mvPlan = CarbonSparkSqlParserUtil.getMVPlan(dataMapSchema.getCtasQuery, sparkSession)
       // here setting back to current database of current session, because if the actual query
       // contains db name in query like, select db1.column1 from table and current database is
       // default and if we drop the db1, still the session has current db as db1.
       // So setting back to current database.
       setCurrentDataBase(currentDatabase)
-      val planToRegister = MVHelper.dropDummFuc(query.queryExecution.analyzed)
+      val planToRegister = MVHelper.dropDummFuc(mvPlan)
       val modularPlan =
         mvSession.sessionState.modularizer.modularize(
           mvSession.sessionState.optimizer.execute(planToRegister)).next().semiHarmonized
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCoalesceTestCase.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCoalesceTestCase.scala
index 5310cc8..54487d9 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCoalesceTestCase.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCoalesceTestCase.scala
@@ -19,10 +19,10 @@ package org.apache.carbondata.mv.rewrite
 import org.apache.spark.sql.Row
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.execution.datasources.LogicalRelation
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-class MVCoalesceTestCase  extends CarbonQueryTest with BeforeAndAfterAll  {
+class MVCoalesceTestCase extends QueryTest with BeforeAndAfterAll {
   override def beforeAll(): Unit = {
     drop()
     sql("create table coalesce_test_main(id int,name string,height int,weight int) " +
@@ -43,7 +43,7 @@ class MVCoalesceTestCase  extends CarbonQueryTest with BeforeAndAfterAll  {
     sql("rebuild datamap coalesce_test_main_mv")
 
     val frame = sql("select coalesce(sum(id),0) as sumid,name from coalesce_test_main group by name")
-    assert(TestUtil.verifyMVDataMap(frame.queryExecution.analyzed, "coalesce_test_main_mv"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "coalesce_test_main_mv"))
     checkAnswer(frame, Seq(Row(3, "tom"), Row(3, "lily")))
 
     sql("drop datamap if exists coalesce_test_main_mv")
@@ -59,7 +59,7 @@ class MVCoalesceTestCase  extends CarbonQueryTest with BeforeAndAfterAll  {
     assert("MV doesn't support Coalesce".equals(exception.getMessage))
 
     val frame = sql("select coalesce(sum(id),0) as sumid,name from coalesce_test_main group by name")
-    assert(!TestUtil.verifyMVDataMap(frame.queryExecution.analyzed, "coalesce_test_main_mv"))
+    assert(!TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "coalesce_test_main_mv"))
     checkAnswer(frame, Seq(Row(3, "tom"), Row(3, "lily")))
 
     sql("drop datamap if exists coalesce_test_main_mv")
@@ -72,7 +72,7 @@ class MVCoalesceTestCase  extends CarbonQueryTest with BeforeAndAfterAll  {
     sql("rebuild datamap coalesce_test_main_mv")
 
     val frame = sql("select sum(coalesce(id,0)) as sumid,name from coalesce_test_main group by name")
-    assert(TestUtil.verifyMVDataMap(frame.queryExecution.analyzed, "coalesce_test_main_mv"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "coalesce_test_main_mv"))
     checkAnswer(frame, Seq(Row(3, "tom"), Row(3, "lily")))
 
     sql("drop datamap if exists coalesce_test_main_mv")
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCountAndCaseTestCase.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCountAndCaseTestCase.scala
index 2c13a6e..5f9734f 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCountAndCaseTestCase.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCountAndCaseTestCase.scala
@@ -16,12 +16,10 @@
  */
 package org.apache.carbondata.mv.rewrite
 
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.execution.datasources.LogicalRelation
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-class MVCountAndCaseTestCase  extends CarbonQueryTest with BeforeAndAfterAll{
+class MVCountAndCaseTestCase extends QueryTest with BeforeAndAfterAll {
 
 
   override def beforeAll(): Unit = {
@@ -79,7 +77,7 @@ class MVCountAndCaseTestCase  extends CarbonQueryTest with BeforeAndAfterAll{
                        | GROUP BY MT.`3600`, MT.`2250410101`
                        | ORDER BY `3600` ASC LIMIT 5000""".stripMargin)
 
-    assert(TestUtil.verifyMVDataMap(frame.queryExecution.analyzed, "data_table_mv"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "data_table_mv"))
   }
 
   override def afterAll(): Unit = {
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala
index 032d8dd..6f17fdc 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVCreateTestCase.scala
@@ -20,17 +20,16 @@ import java.io.File
 import java.nio.file.{Files, Paths}
 
 import org.apache.spark.sql.{CarbonEnv, Row}
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.core.util.path.CarbonTablePath
 import org.apache.carbondata.spark.exception.ProcessMetaDataException
 
-class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
+class MVCreateTestCase extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll {
     drop()
@@ -46,7 +45,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
         |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
         |  utilization int,salary int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
@@ -56,7 +55,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
         |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
         |  utilization int,salary int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table2 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
@@ -67,7 +66,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
         |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
         |  utilization int,salary int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table3 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table3 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
@@ -78,7 +77,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
         |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
         |  utilization int,salary int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table4 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table4 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
@@ -88,7 +87,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
         |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
         |  utilization int,salary int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table5 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table5 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
@@ -99,7 +98,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
         |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
         |  utilization int,salary int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table6 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
@@ -109,8 +108,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("drop datamap if exists datamap1")
     sql("create datamap datamap1 using 'mv' as select empname, designation from fact_table1")
     val df = sql("select empname,designation from fact_table1")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap1"))
     checkAnswer(df, sql("select empname,designation from fact_table2"))
     sql(s"drop datamap datamap1")
   }
@@ -119,8 +117,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("drop datamap if exists datamap2")
     sql("create datamap datamap2 using 'mv' as select empname, designation from fact_table1")
     val df = sql("select empname from fact_table1")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap2"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap2"))
     checkAnswer(df, sql("select empname from fact_table2"))
     sql(s"drop datamap datamap2")
   }
@@ -129,8 +126,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("drop datamap if exists datamap3")
     sql("create datamap datamap3 using 'mv' as select empname, designation from fact_table1")
     val frame = sql("select empname, designation from fact_table1 where empname='shivani'")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap3"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap3"))
 
     checkAnswer(frame, sql("select empname, designation from fact_table2 where empname='shivani'"))
     sql(s"drop datamap datamap3")
@@ -139,8 +135,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
   test("test create datamap with simple and sub projection with non projection filter") {
     sql("create datamap datamap4 using 'mv' as select empname, designation from fact_table1")
     val frame = sql("select designation from fact_table1 where empname='shivani'")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap4"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap4"))
     checkAnswer(frame, sql("select designation from fact_table2 where empname='shivani'"))
     sql(s"drop datamap datamap4")
   }
@@ -148,8 +143,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
   test("test create datamap with simple and sub projection with datamap filter") {
     sql("create datamap datamap5 using 'mv' as select empname, designation from fact_table1 where empname='shivani'")
     val frame = sql("select designation from fact_table1 where empname='shivani'")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap5"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap5"))
     checkAnswer(frame, sql("select designation from fact_table2 where empname='shivani'"))
     sql(s"drop datamap datamap5")
   }
@@ -157,8 +151,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
   test("test create datamap with simple and same projection with datamap filter ") {
     sql("create datamap datamap6 using 'mv' as select empname, designation from fact_table1 where empname='shivani'")
     val frame = sql("select empname,designation from fact_table1 where empname='shivani'")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap6"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap6"))
     checkAnswer(frame, sql("select empname,designation from fact_table2 where empname='shivani'"))
     sql(s"drop datamap datamap6")
   }
@@ -167,8 +160,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap7 using 'mv' as select empname, designation from fact_table1 where empname='shivani'")
     val frame = sql(
       "select empname,designation from fact_table1 where empname='shivani' and designation='SA'")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap7"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap7"))
     checkAnswer(frame, sql("select empname,designation from fact_table2 where empname='shivani' and designation='SA'"))
     sql(s"drop datamap datamap7")
   }
@@ -176,8 +168,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
   test("test create datamap with simple and same projection with datamap filter and different column filter") {
     sql("create datamap datamap8 using 'mv' as select empname, designation from fact_table1 where empname='shivani'")
     val frame = sql("select empname,designation from fact_table1 where designation='SA'")
-    val analyzed = frame.queryExecution.analyzed
-    assert(!TestUtil.verifyMVDataMap(analyzed, "datamap8"))
+    assert(!TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap8"))
     checkAnswer(frame, sql("select empname,designation from fact_table2 where designation='SA'"))
     sql(s"drop datamap datamap8")
   }
@@ -185,8 +176,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
   test("test create datamap with simple and same projection with datamap filter on non projection column and extra column filter") {
     sql("create datamap datamap9 using 'mv' as select empname, designation,deptname  from fact_table1 where deptname='cloud'")
     val frame = sql("select empname,designation from fact_table1 where deptname='cloud'")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap9"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap9"))
     checkAnswer(frame, sql("select empname,designation from fact_table2 where deptname='cloud'"))
     sql(s"drop datamap datamap9")
   }
@@ -194,8 +184,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
   test("test create datamap with simple and same projection with datamap filter on non projection column and no column filter") {
     sql("create datamap datamap10 using 'mv' as select empname, designation,deptname from fact_table1 where deptname='cloud'")
     val frame = sql("select empname,designation from fact_table1")
-    val analyzed = frame.queryExecution.analyzed
-    assert(!TestUtil.verifyMVDataMap(analyzed, "datamap10"))
+    assert(!TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap10"))
     checkAnswer(frame, sql("select empname,designation from fact_table2"))
     sql(s"drop datamap datamap10")
   }
@@ -203,8 +192,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
   test("test create datamap with simple and same projection with datamap filter on non projection column and different column filter") {
     sql("create datamap datamap11 using 'mv' as select empname, designation,deptname from fact_table1 where deptname='cloud'")
     val frame = sql("select empname,designation from fact_table1 where designation='SA'")
-    val analyzed = frame.queryExecution.analyzed
-    assert(!TestUtil.verifyMVDataMap(analyzed, "datamap11"))
+    assert(!TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap11"))
     checkAnswer(frame, sql("select empname,designation from fact_table2 where designation='SA'"))
     sql(s"drop datamap datamap11")
   }
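
    datamap8 through datamap11 pin down filter containment: an MV can serve a
    query only when the MV's own filter is no stricter than the query's,
    since a stricter MV may have discarded rows the query needs. The rule
    reduces to predicate subsumption; a toy model of it:

        // Toy containment check (sets stand in for conjunctive predicates):
        // the MV's filter must be implied by the query's filter.
        def canServe(mvFilter: Set[String], queryFilter: Set[String]): Boolean =
          mvFilter.subsetOf(queryFilter)

        assert(canServe(Set.empty, Set("empname='shivani'")))  // datamap4/5 style: hit
        assert(!canServe(Set("deptname='cloud'"), Set.empty))  // datamap10 style: miss
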
@@ -213,8 +201,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("drop datamap if exists datamap12")
     sql("create datamap datamap12 using 'mv' as select empname, sum(utilization) from fact_table1 group by empname")
     val frame = sql("select empname, sum(utilization) from fact_table1 group by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap12"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap12"))
     checkAnswer(frame, sql("select empname, sum(utilization) from fact_table2 group by empname"))
     sql(s"drop datamap datamap12")
   }
@@ -223,8 +210,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("drop datamap if exists datamap13")
     sql("create datamap datamap13 using 'mv' as select empname, sum(utilization) from fact_table1 group by empname")
     val frame = sql("select sum(utilization) from fact_table1 group by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap13"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap13"))
     checkAnswer(frame, sql("select sum(utilization) from fact_table2 group by empname"))
     sql(s"drop datamap datamap13")
   }
@@ -234,8 +220,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap14 using 'mv' as select empname, sum(utilization) from fact_table1 group by empname")
     val frame = sql(
       "select empname,sum(utilization) from fact_table1 group by empname having empname='shivani'")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap14"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap14"))
     checkAnswer(frame, sql("select empname,sum(utilization) from fact_table2 where empname='shivani' group by empname"))
     sql(s"drop datamap datamap14")
   }
@@ -245,8 +230,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap32 using 'mv' as select empname, sum(utilization) from fact_table1 group by empname")
     val frame = sql(
       "select empname, sum(utilization) from fact_table1 group by empname having empname='shivani'")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap32"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap32"))
     checkAnswer(frame, sql( "select empname, sum(utilization) from fact_table2 group by empname having empname='shivani'"))
     sql(s"drop datamap datamap32")
   }
@@ -255,8 +239,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap15 using 'mv' as select empname, sum(utilization) from fact_table1 where empname='shivani' group by empname")
     val frame = sql(
       "select empname,sum(utilization) from fact_table1 where empname='shivani' group by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap15"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap15"))
     checkAnswer(frame, sql("select empname,sum(utilization) from fact_table2 where empname='shivani' group by empname"))
     sql(s"drop datamap datamap15")
   }
@@ -264,8 +247,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
   test("test create datamap with simple and sub group by query with filter on datamap and no filter on query") {
     sql("create datamap datamap16 using 'mv' as select empname, sum(utilization) from fact_table1 where empname='shivani' group by empname")
     val frame = sql("select empname,sum(utilization) from fact_table1 group by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(!TestUtil.verifyMVDataMap(analyzed, "datamap16"))
+    assert(!TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap16"))
     checkAnswer(frame, sql("select empname,sum(utilization) from fact_table2 group by empname"))
     sql(s"drop datamap datamap16")
   }
@@ -275,8 +257,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select empname, sum(CASE WHEN utilization=27 THEN deptno ELSE 0 END) from fact_table1 group" +
       " by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap17"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap17"))
     checkAnswer(frame, sql("select empname, sum(CASE WHEN utilization=27 THEN deptno ELSE 0 END) from fact_table2 group" +
                            " by empname"))
     sql(s"drop datamap datamap17")
@@ -287,8 +268,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap18 using 'mv' as select empname, sum(CASE WHEN utilization=27 THEN deptno ELSE 0 END) from fact_table1 group by empname")
     val frame = sql(
       "select sum(CASE WHEN utilization=27 THEN deptno ELSE 0 END) from fact_table1 group by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap18"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap18"))
     checkAnswer(frame, sql("select sum(CASE WHEN utilization=27 THEN deptno ELSE 0 END) from fact_table2 group by empname"))
     sql(s"drop datamap datamap18")
   }
@@ -298,8 +278,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap19 using 'mv' as select empname, count(CASE WHEN utilization=27 THEN deptno ELSE 0 END) from fact_table1 group by empname")
     val frame = sql(
       "select count(CASE WHEN utilization=27 THEN deptno ELSE 0 END) from fact_table1 group by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap19"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap19"))
     checkAnswer(frame, sql("select count(CASE WHEN utilization=27 THEN deptno ELSE 0 END) from fact_table2 group by empname"))
     sql(s"drop datamap datamap19")
   }
@@ -310,8 +289,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select sum(CASE WHEN utilization=27 THEN deptno ELSE 0 END) from fact_table1 where " +
       "empname='shivani' group by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap20"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap20"))
     checkAnswer(frame, sql("select sum(CASE WHEN utilization=27 THEN deptno ELSE 0 END) from fact_table2 where " +
                            "empname='shivani' group by empname"))
     sql(s"drop datamap datamap20")
@@ -322,8 +300,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap21 using 'mv' as select t1.empname as c1, t2.designation, t2.empname as c2 from fact_table1 t1 inner join fact_table2 t2  on (t1.empname = t2.empname)")
     val frame = sql(
       "select t1.empname as c1, t2.designation from fact_table1 t1,fact_table2 t2 where t1.empname = t2.empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap21"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap21"))
     checkAnswer(frame, sql("select t1.empname, t2.designation from fact_table4 t1,fact_table5 t2 where t1.empname = t2.empname"))
     sql(s"drop datamap datamap21")
   }
@@ -334,8 +311,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, t2.designation from fact_table1 t1,fact_table2 t2 where t1.empname = " +
       "t2.empname and t1.empname='shivani'")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap22"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap22"))
     checkAnswer(frame, sql("select t1.empname, t2.designation from fact_table4 t1,fact_table5 t2 where t1.empname = " +
                            "t2.empname and t1.empname='shivani'"))
     sql(s"drop datamap datamap22")
@@ -348,8 +324,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, t2.designation from fact_table1 t1,fact_table2 t2 where t1.empname = " +
       "t2.empname and t1.empname='shivani'")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap23"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap23"))
     checkAnswer(frame, sql("select t1.empname, t2.designation from fact_table4 t1,fact_table5 t2 where t1.empname = " +
                            "t2.empname and t1.empname='shivani'"))
     sql(s"drop datamap datamap23")
@@ -360,8 +335,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap24 using 'mv' as select t1.empname, t2.designation, t2.empname from fact_table1 t1 inner join fact_table2 t2 on (t1.empname = t2.empname) where t1.empname='shivani'")
     val frame = sql(
       "select t1.empname, t2.designation from fact_table1 t1,fact_table2 t2 where t1.empname = t2.empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(!TestUtil.verifyMVDataMap(analyzed, "datamap24"))
+    assert(!TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap24"))
     checkAnswer(frame, sql("select t1.empname, t2.designation from fact_table4 t1,fact_table5 t2 where t1.empname = t2.empname"))
     sql(s"drop datamap datamap24")
   }
@@ -371,12 +345,10 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap25 using 'mv' as select t1.empname as c1, t2.designation, t2.empname, t3.empname from fact_table1 t1 inner join fact_table2 t2 on (t1.empname = t2.empname) inner join fact_table3 t3  on (t1.empname=t3.empname)")
     val frame = sql(
       "select t1.empname as c1, t2.designation from fact_table1 t1,fact_table2 t2 where t1.empname = t2.empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(!TestUtil.verifyMVDataMap(analyzed, "datamap25"))
+    assert(!TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap25"))
     val frame1 = sql(
       "select t1.empname as c1, t2.designation from fact_table1 t1 inner join fact_table2 t2 on (t1.empname = t2.empname) inner join fact_table3 t3  on (t1.empname=t3.empname)")
-    val analyzed1 = frame1.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "datamap25"))
+    assert(TestUtil.verifyMVDataMap(frame1.queryExecution.optimizedPlan, "datamap25"))
     checkAnswer(frame, sql("select t1.empname, t2.designation from fact_table4 t1,fact_table5 t2 where t1.empname = t2.empname"))
     sql(s"drop datamap datamap25")
   }
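
    datamap25 is the key negative case in the join block: an MV built over a
    three-way inner join cannot answer a two-way join query, because the
    extra join can drop (or duplicate) rows. A minimal demonstration of why,
    given a SparkSession named spark (the data is made up for the sketch):

        import spark.implicits._

        val t1 = Seq(("shivani", 1)).toDF("empname", "x")
        val t2 = Seq(("shivani", "SA")).toDF("empname", "designation")
        val t3 = Seq.empty[(String, Int)].toDF("empname", "y") // no matching rows
        val twoWay   = t1.join(t2, "empname")
        val threeWay = twoWay.join(t3, "empname")
        // The extra inner join changes the result set, so the 3-way MV is not
        // a safe source for the 2-way query; hence the negative assertion above.
        assert(twoWay.count() == 1 && threeWay.count() == 0)
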
@@ -386,8 +358,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, t2.designation from fact_table1 t1,fact_table2 t2,fact_table3 " +
       "t3  where t1.empname = t2.empname and t1.empname=t3.empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap26"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap26"))
     checkAnswer(frame, sql("select t1.empname, t2.designation from fact_table4 t1,fact_table5 t2,fact_table6 " +
                            "t3  where t1.empname = t2.empname and t1.empname=t3.empname"))
     sql(s"drop datamap datamap26")
@@ -398,8 +369,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, t2.designation, sum(t1.utilization) from fact_table1 t1,fact_table2 t2  " +
       "where t1.empname = t2.empname group by t1.empname, t2.designation")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap27"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap27"))
     checkAnswer(frame, sql("select t1.empname, t2.designation, sum(t1.utilization) from fact_table4 t1,fact_table5 t2  " +
                            "where t1.empname = t2.empname group by t1.empname, t2.designation"))
     sql(s"drop datamap datamap27")
@@ -411,8 +381,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t2.designation, sum(t1.utilization) from fact_table1 t1,fact_table2 t2  where " +
       "t1.empname = t2.empname group by t2.designation")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap28"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap28"))
     checkAnswer(frame, sql("select t2.designation, sum(t1.utilization) from fact_table4 t1,fact_table5 t2  where " +
                            "t1.empname = t2.empname group by t2.designation"))
     sql(s"drop datamap datamap28")
@@ -424,8 +393,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t2.designation, sum(t1.utilization) from fact_table1 t1,fact_table2 t2  where " +
       "t1.empname = t2.empname and t1.empname='shivani' group by t2.designation")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap29"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap29"))
     checkAnswer(frame, sql("select t2.designation, sum(t1.utilization) from fact_table4 t1,fact_table5 t2  where " +
                            "t1.empname = t2.empname and t1.empname='shivani' group by t2.designation"))
     sql(s"drop datamap datamap29")
@@ -437,8 +405,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname ,t2.designation, sum(t1.utilization) from fact_table1 t1,fact_table2 t2  where " +
       "t1.empname = t2.empname and t1.empname='shivani' group by t2.designation,t1.empname ")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap29"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap29"))
     checkAnswer(frame, sql("select t1.empname ,t2.designation, sum(t1.utilization) from fact_table4 t1,fact_table5 t2  where " +
                            "t1.empname = t2.empname and t1.empname='shivani' group by t2.designation,t1.empname "))
     sql(s"drop datamap datamap29")
@@ -450,8 +417,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname ,t2.designation, sum(t1.utilization) from fact_table1 t1,fact_table2 t2  where " +
       "t1.empname = t2.empname and t1.empname='shivani' group by t2.designation,t1.empname ")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap29"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap29"))
     checkAnswer(frame, sql("select t1.empname ,t2.designation, sum(t1.utilization) from fact_table4 t1,fact_table5 t2  where " +
                            "t1.empname = t2.empname and t1.empname='shivani' group by t2.designation,t1.empname "))
     sql(s"drop datamap datamap29")
@@ -463,8 +429,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, t2.designation, sum(t1.utilization) from fact_table1 t1,fact_table2 t2  " +
       "where t1.empname = t2.empname and t2.designation='SA' group by t1.empname, t2.designation")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap30"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap30"))
     checkAnswer(frame, sql("select t1.empname, t2.designation, sum(t1.utilization) from fact_table4 t1,fact_table5 t2  " +
                            "where t1.empname = t2.empname and t2.designation='SA' group by t1.empname, t2.designation"))
     sql(s"drop datamap datamap30")
@@ -475,8 +440,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap31 using 'mv' as select empname, designation, utilization, projectcode from fact_table1 ")
     val frame = sql(
       "select empname, designation, utilization+projectcode from fact_table1")
-    val analyzed = frame.queryExecution.analyzed
-    assert(!TestUtil.verifyMVDataMap(analyzed, "datamap31"))
+    assert(!TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap31"))
     checkAnswer(frame, sql("select empname, designation, utilization+projectcode from fact_table2"))
     sql(s"drop datamap datamap31")
   }
@@ -485,8 +449,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap32")
     sql("create datamap datamap32 using 'mv' as select empname, count(utilization) from fact_table1 group by empname")
     val frame = sql("select empname,count(utilization) from fact_table1 where empname='shivani' group by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap32"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap32"))
     checkAnswer(frame, sql("select empname,count(utilization) from fact_table2 where empname='shivani' group by empname"))
     sql(s"drop datamap datamap32")
   }
@@ -495,8 +458,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap33")
     sql("create datamap datamap33 using 'mv' as select empname, avg(utilization) from fact_table1 group by empname")
     val frame = sql("select empname,avg(utilization) from fact_table1 where empname='shivani' group by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap33"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap33"))
     checkAnswer(frame, sql("select empname,avg(utilization) from fact_table2 where empname='shivani' group by empname"))
     sql(s"drop datamap datamap33")
   }
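
    avg (datamap33) is the subtle aggregate: serving a filtered avg from an
    MV is only safe if the average can be re-derived after regrouping, which
    is why MV implementations typically materialize sum and count and compute
    avg as sum / count at query time (stated as the general technique, not a
    claim about this patch's internals). The identity itself:

        // Re-aggregating per-group (sum, count) pairs reproduces the global avg.
        val groups = Map("g1" -> Seq(1.0, 2.0), "g2" -> Seq(3.0))
        val all    = groups.values.flatten.toSeq
        val (s, c) = groups.values.map(g => (g.sum, g.size))
                           .reduce((a, b) => (a._1 + b._1, a._2 + b._2))
        assert(s / c == all.sum / all.size)
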
@@ -507,8 +469,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, t2.designation, sum(t1.utilization) from fact_table1 t1 left join fact_table2 t2  " +
       "on t1.empname = t2.empname group by t1.empname, t2.designation")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap34"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap34"))
     checkAnswer(frame, sql("select t1.empname, t2.designation, sum(t1.utilization) from fact_table4 t1 left join fact_table5 t2  " +
                            "on t1.empname = t2.empname group by t1.empname, t2.designation"))
     sql(s"drop datamap datamap34")
@@ -518,8 +479,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap35 using 'mv' as select designation, sum(utilization) from fact_table1 where empname='shivani' group by designation")
     val frame = sql(
       "select designation, sum(utilization) from fact_table1 where empname='shivani' group by designation")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap35"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap35"))
     checkAnswer(frame, sql("select designation, sum(utilization) from fact_table2 where empname='shivani' group by designation"))
     sql(s"drop datamap datamap35")
   }
@@ -528,8 +488,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap36 using 'mv' as select designation, sum(utilization) from fact_table1 where empname='shivani' group by designation")
     val frame = sql(
       "select sum(utilization) from fact_table1 where empname='shivani' group by designation")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap36"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap36"))
     checkAnswer(frame, sql("select sum(utilization) from fact_table2 where empname='shivani' group by designation"))
     sql(s"drop datamap datamap36")
   }
@@ -540,8 +499,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, sum(t1.utilization) from fact_table1 t1,fact_table2 t2  " +
       "where t1.empname = t2.empname group by t1.empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap37"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap37"))
     checkAnswer(frame, sql("select t1.empname, sum(t1.utilization) from fact_table3 t1,fact_table4 t2  " +
                            "where t1.empname = t2.empname group by t1.empname, t1.designation"))
     sql(s"drop datamap datamap37")
@@ -553,8 +511,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, t1.designation, sum(t1.utilization) from fact_table1 t1,fact_table2 t2  " +
       "where t1.empname = t2.empname group by t1.empname,t1.designation")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap38"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap38"))
     checkAnswer(frame, sql("select t1.empname,t1.designation, sum(t1.utilization) from fact_table3 t1,fact_table4 t2  " +
                            "where t1.empname = t2.empname group by t1.empname, t1.designation"))
     sql(s"drop datamap datamap38")
@@ -566,8 +523,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, t1.designation, sum(t1.utilization) from fact_table1 t1,fact_table2 t2  " +
       "where t1.empname = t2.empname and t1.empname='shivani' group by t1.empname,t1.designation")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap39"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap39"))
     checkAnswer(frame, sql("select t1.empname,t1.designation, sum(t1.utilization) from fact_table3 t1,fact_table4 t2  " +
                            "where t1.empname = t2.empname and t1.empname='shivani' group by t1.empname, t1.designation"))
     sql(s"drop datamap datamap39")
@@ -579,8 +535,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, t1.designation, sum(t1.utilization),count(t1.utilization) from fact_table1 t1,fact_table2 t2  " +
       "where t1.empname = t2.empname and t1.empname='shivani' group by t1.empname,t1.designation")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap40"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap40"))
     checkAnswer(frame, sql("select t1.empname, t1.designation, sum(t1.utilization),count(t1.utilization) from fact_table3 t1,fact_table4 t2  " +
                            "where t1.empname = t2.empname and t1.empname='shivani' group by t1.empname,t1.designation"))
     sql(s"drop datamap datamap40")
@@ -592,8 +547,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, t2.designation, sum(t1.utilization) from fact_table1 t1 left join fact_table2 t2  " +
       "on t1.empname = t2.empname where t1.empname='shivani' group by t1.empname, t2.designation")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap41"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap41"))
     checkAnswer(frame, sql("select t1.empname, t2.designation, sum(t1.utilization) from fact_table4 t1 left join fact_table5 t2  " +
                            "on t1.empname = t2.empname where t1.empname='shivani' group by t1.empname, t2.designation"))
     sql(s"drop datamap datamap41")
@@ -605,8 +559,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, sum(t1.utilization) from fact_table1 t1 left join fact_table2 t2  " +
       "on t1.empname = t2.empname group by t1.empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap42"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap42"))
     checkAnswer(frame, sql("select t1.empname, sum(t1.utilization) from fact_table4 t1 left join fact_table5 t2  " +
                            "on t1.empname = t2.empname group by t1.empname"))
     sql(s"drop datamap datamap42")
@@ -618,8 +571,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, sum(t1.utilization) from fact_table1 t1 left join fact_table2 t2  " +
       "on t1.empname = t2.empname where t1.empname='shivani' group by t1.empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap43"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap43"))
     checkAnswer(frame, sql("select t1.empname, sum(t1.utilization) from fact_table4 t1 left join fact_table5 t2  " +
                            "on t1.empname = t2.empname where t1.empname='shivani' group by t1.empname"))
     sql(s"drop datamap datamap43")
@@ -631,8 +583,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, sum(t1.utilization) from fact_table1 t1 left join fact_table2 t2  " +
       "on t1.empname = t2.empname where t1.empname='shivani' group by t1.empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap44"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap44"))
     checkAnswer(frame, sql("select t1.empname, sum(t1.utilization) from fact_table4 t1 left join fact_table5 t2  " +
                            "on t1.empname = t2.empname where t1.empname='shivani' group by t1.empname"))
     sql(s"drop datamap datamap44")
@@ -646,8 +597,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val frame = sql(
       "select t1.empname, t2.designation, sum(t1.utilization) from fact_table1 t1 left join fact_table2 t2  " +
       "on t1.empname = t2.empname where t2.designation='SA' group by t1.empname, t2.designation")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap45"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap45"))
     checkAnswer(frame, sql("select t1.empname, t2.designation, sum(t1.utilization) from fact_table4 t1 left join fact_table5 t2  " +
                            "on t1.empname = t2.empname where t2.designation='SA' group by t1.empname, t2.designation"))
     sql(s"drop datamap datamap45")
@@ -657,14 +607,13 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
 
     sql("drop datamap if exists mv13")
     sql("drop table if exists test4")
-    sql("create table test4 ( name string,age int,salary int) stored by 'carbondata'")
+    sql("create table test4 ( name string,age int,salary int) STORED AS carbondata")
 
     sql(" insert into test4 select 'babu',12,12").show()
     sql("create datamap mv13 using 'mv' as select name,sum(salary) from test4 group by name")
     val frame = sql(
       "select name,sum(salary) from test4 group by name")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "mv13"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "mv13"))
   }
 
   test("jira carbondata-2528-1") {
@@ -673,8 +622,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap MV_order using 'mv' as select empname,sum(salary) as total from fact_table1 group by empname")
     val frame = sql(
       "select empname,sum(salary) as total from fact_table1 group by empname order by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "MV_order"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "MV_order"))
   }
 
   test("jira carbondata-2528-2") {
@@ -683,8 +631,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap MV_order using 'mv' as select empname,sum(salary)+sum(utilization) as total from fact_table1 group by empname")
     val frame = sql(
       "select empname,sum(salary)+sum(utilization) as total from fact_table1 group by empname order by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "MV_order"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "MV_order"))
   }
 
   test("jira carbondata-2528-3") {
@@ -693,8 +640,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap MV_order using 'mv' as select empname,sum(salary)+sum(utilization) as total from fact_table1 group by empname order by empname DESC")
     val frame = sql(
       "select empname,sum(salary)+sum(utilization) as total from fact_table1 group by empname order by empname DESC")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "MV_order"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "MV_order"))
     sql("drop datamap if exists MV_order")
   }
 
@@ -704,8 +650,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap MV_order using 'mv' as select empname,sum(salary)+sum(utilization) as total from fact_table1 group by empname order by empname DESC")
     val frame = sql(
       "select empname,sum(salary)+sum(utilization) as total from fact_table1 where empname = 'ravi' group by empname order by empname DESC")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "MV_order"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "MV_order"))
     sql("drop datamap if exists MV_order")
   }
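
    The four carbondata-2528 tests fix ORDER BY handling: an MV defined
    without an ORDER BY can still serve an ordered query, because the sort is
    simply applied on top of the MV scan. Conceptually the rewrite is
    equivalent to the following (table and column names are illustrative, not
    the actual materialized schema):

        // Hypothetical rewritten form of the MV_order query: the grouped
        // aggregate is read back precomputed; only the sort runs at query time.
        spark.sql(
          """SELECT empname, total
            |FROM mv_order_table
            |ORDER BY empname
          """.stripMargin)
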
 
@@ -713,16 +658,14 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
 
     sql("drop table if exists test1")
     sql("drop datamap if exists datamv2")
-    sql("create table test1( name string,country string,age int,salary int) stored by 'carbondata'")
+    sql("create table test1( name string,country string,age int,salary int) STORED AS carbondata")
     sql("insert into test1 select 'name1','USA',12,23")
     sql("create datamap datamv2 using 'mv' as select country,sum(salary) from test1 group by country")
     val frame = sql("select country,sum(salary) from test1 where country='USA' group by country")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamv2"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamv2"))
     sql("insert into test1 select 'name1','USA',12,23")
     val frame1 = sql("select country,sum(salary) from test1 where country='USA' group by country")
-    val analyzed1 = frame1.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "datamv2"))
+    assert(TestUtil.verifyMVDataMap(frame1.queryExecution.optimizedPlan, "datamv2"))
     sql("drop datamap if exists datamv2")
     sql("drop table if exists test1")
   }
@@ -733,8 +676,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap MV_exp using 'mv' as select sum(salary),substring(empname,2,5),designation from fact_table1 group by substring(empname,2,5),designation")
     val frame = sql(
       "select sum(salary),substring(empname,2,5),designation from fact_table1 group by substring(empname,2,5),designation")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "MV_exp"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "MV_exp"))
     sql("drop datamap if exists MV_exp")
   }
 
@@ -747,14 +689,13 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
         |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
         |  utilization int,salary int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists MV_exp")
     sql("create datamap MV_exp using 'mv' as select doj,sum(salary) from xy.fact_tablexy group by doj")
     val frame = sql(
       "select doj,sum(salary) from xy.fact_tablexy group by doj")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "MV_exp"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "MV_exp"))
     sql("drop datamap if exists MV_exp")
     sql("""drop database if exists xy cascade""")
   }
@@ -763,15 +704,14 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
 
     sql("drop table if exists mvtable1")
     sql("drop datamap if exists map1")
-    sql("create table mvtable1(name string,age int,salary int) stored by 'carbondata'")
+    sql("create table mvtable1(name string,age int,salary int) STORED AS carbondata")
     sql(" insert into mvtable1 select 'n1',12,12")
     sql("  insert into mvtable1 select 'n1',12,12")
     sql(" insert into mvtable1 select 'n3',12,12")
     sql(" insert into mvtable1 select 'n4',12,12")
     sql("create datamap map1 using 'mv' as select name,sum(salary) from mvtable1 group by name")
     val frame = sql("select name,sum(salary) from mvtable1 group by name limit 1")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "map1"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "map1"))
     sql("drop datamap if exists map1")
     sql("drop table if exists mvtable1")
   }
@@ -782,8 +722,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap_comp_maxsumminavg using 'mv' as select empname,max(projectenddate),sum(salary),min(projectjoindate),avg(attendance) from fact_table1 group by empname")
     val frame = sql(
       "select empname,max(projectenddate),sum(salary),min(projectjoindate),avg(attendance) from fact_table1 group by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_comp_maxsumminavg"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap_comp_maxsumminavg"))
     sql("drop datamap if exists datamap_comp_maxsumminavg")
   }
 
@@ -806,8 +745,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
 
       val frame = sql(
         "select sum(case when deptno=11 and (utilization=92) then salary else 0 end) as t from fact_table1 group by empname")
-      val analyzed = frame.queryExecution.analyzed
-      assert(TestUtil.verifyMVDataMap(analyzed, "MV_exp"))
+      assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "MV_exp"))
     }
     sql("drop datamap if exists MV_exp")
   }
@@ -824,8 +762,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("show datamap").show()
     val frame = sql(
       "select empname, sum(utilization) from fact_table1 group by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "MV_exp1"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "MV_exp1"))
     sql("drop datamap if exists MV_exp1")
     sql("drop datamap if exists MV_exp2")
   }
@@ -836,8 +773,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap46 using 'mv' as select deptname, sum(salary) from fact_table1 group by deptname")
     val frame = sql(
       "select deptname as babu, sum(salary) from fact_table1 as tt group by deptname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap46"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap46"))
     sql("drop datamap if exists datamap46")
   }
 
@@ -847,8 +783,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap_subqry using 'mv' as select empname, min(salary) from fact_table1 group by empname")
     val frame = sql(
       "SELECT max(utilization) FROM fact_table1 WHERE salary IN (select min(salary) from fact_table1 group by empname ) group by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_subqry"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap_subqry"))
     sql("drop datamap if exists datamap_subqry")
   }
 
@@ -859,8 +794,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap_subqry using 'mv' as select min(salary) from fact_table1")
     val frame = sql(
       "SELECT max(utilization) FROM fact_table1 WHERE salary IN (select min(salary) from fact_table1) group by empname")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_subqry"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "datamap_subqry"))
     sql("drop datamap if exists datamap_subqry")
   }
 
@@ -868,8 +802,8 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
 
     sql("drop table if exists mvtable1")
     sql("drop table if exists mvtable2")
-    sql("create table mvtable1(name string,age int,salary int) stored by 'carbondata'")
-    sql("create table mvtable2(name string,age int,salary int) stored by 'carbondata'")
+    sql("create table mvtable1(name string,age int,salary int) STORED AS carbondata")
+    sql("create table mvtable2(name string,age int,salary int) STORED AS carbondata")
     sql("create datamap MV11 using 'mv' as select name from mvtable2")
     sql(" insert into mvtable1 select 'n1',12,12")
     sql("  insert into mvtable1 select 'n1',12,12")
@@ -891,7 +825,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
         |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
         |  utilization int,salary int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('streaming'='true')
       """.stripMargin)
     sql(
@@ -919,7 +853,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
         |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
         |  utilization int,salary int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('streaming'='true')
       """.stripMargin)
     sql(
@@ -964,14 +898,13 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
        | create table all_table(x1 bigint,x2 bigint,
        | x3 string,x4 bigint,x5 bigint,x6 int,x7 string,x8 int, x9 int,x10 bigint,
        | x11 bigint, x12 bigint,x13 bigint,x14 bigint,x15 bigint,x16 bigint,
-       | x17 bigint,x18 bigint,x19 bigint) stored by 'carbondata'""".stripMargin)
+       | x17 bigint,x18 bigint,x19 bigint) STORED AS carbondata""".stripMargin)
     sql("insert into all_table select 1,1,null,1,1,1,null,1,1,1,1,1,1,1,1,1,1,1,1")
 
     sql("create datamap all_table_mv on table all_table using 'mv' as " + querySQL)
 
     val frame = sql(querySQL)
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "all_table_mv"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "all_table_mv"))
     assert(1 == frame.collect().size)
 
     sql("drop table if exists all_table")
@@ -979,15 +912,14 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
 
   test("test select * and distinct when MV is enabled") {
     sql("drop table if exists limit_fail")
-    sql("CREATE TABLE limit_fail (empname String, designation String, doj Timestamp,workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int)STORED BY 'org.apache.carbondata.format'")
+    sql("CREATE TABLE limit_fail (empname String, designation String, doj Timestamp,workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,utilization int,salary int)STORED AS carbondata")
     sql(s"LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE limit_fail  OPTIONS" +
         "('DELIMITER'= ',', 'QUOTECHAR'= '\"')")
     sql("create datamap limit_fail_dm1 using 'mv' as select empname,designation from limit_fail")
     try {
       val df = sql("select distinct(empname) from limit_fail limit 10")
       sql("select * from limit_fail limit 10").show()
-      val analyzed = df.queryExecution.analyzed
-      assert(TestUtil.verifyMVDataMap(analyzed, "limit_fail_dm1"))
+      assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "limit_fail_dm1"))
     } catch {
       case ex: Exception =>
         assert(false)
@@ -1006,7 +938,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         | create table all_table(x1 bigint,x2 bigint,
         | x3 string,x4 bigint,x5 bigint,x6 int,x7 string,x8 int, x9 int,x10 bigint,
         | x11 bigint, x12 bigint,x13 bigint,x14 bigint,x15 bigint,x16 bigint,
-        | x17 bigint,x18 bigint,x19 bigint,x20 binary) stored by 'carbondata'""".stripMargin)
+        | x17 bigint,x18 bigint,x19 bigint,x20 binary) STORED AS carbondata""".stripMargin)
     sql("insert into all_table select 1,1,null,1,1,1,null,1,1,1,1,1,1,1,1,1,1,1,1,'binary1'")
     sql("insert into all_table select 1,1,null,1,1,1,null,1,1,1,1,1,1,1,1,1,1,12,2,'binary2'")
     sql("insert into all_table select 1,1,null,1,1,1,null,1,1,1,1,1,1,1,1,1,1,1,2,'binary2'")
@@ -1015,8 +947,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("rebuild datamap all_table_mv")
 
     var frame = sql(querySQL)
-    var analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "all_table_mv"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "all_table_mv"))
     assert(2 == frame.collect().size)
     frame.collect().foreach { each =>
       if (1 == each.get(0)) {
@@ -1031,8 +962,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     }
 
     frame = sql(querySQL2)
-    analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "all_table_mv"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "all_table_mv"))
     assert(1 == frame.collect().size)
     frame.collect().foreach { each =>
       if (2 == each.get(0)) {
@@ -1049,7 +979,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
   test(" test MV with like queries and filter queries") {
     sql("drop table if exists mv_like")
     sql(
-      "create table mv_like(name string, age int, address string, Country string, id int) stored by 'carbondata'")
+      "create table mv_like(name string, age int, address string, Country string, id int) STORED AS carbondata")
     sql(
       "create datamap mvlikedm1 using 'mv' as select name,address from mv_like where Country NOT LIKE 'US' group by name,address")
     sql(
@@ -1057,50 +987,45 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("insert into mv_like select 'chandler', 32, 'newYork', 'US', 5")
     val df1 = sql(
       "select name,address from mv_like where Country NOT LIKE 'US' group by name,address")
-    val analyzed1 = df1.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "mvlikedm1"))
+    assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "mvlikedm1"))
     val df2 = sql(
       "select name,address,Country from mv_like where Country = 'US' or Country = 'China' group by name,address,Country")
-    val analyzed2 = df2.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed2, "mvlikedm2"))
+    assert(TestUtil.verifyMVDataMap(df2.queryExecution.optimizedPlan, "mvlikedm2"))
   }
 
   test("test distinct, count, sum on MV with single projection column") {
     sql("drop table if exists maintable")
-    sql("create table maintable(name string, age int, add string) stored by 'carbondata'")
+    sql("create table maintable(name string, age int, add string) STORED AS carbondata")
     sql("create datamap single_mv using 'mv' as select age from maintable")
     sql("insert into maintable select 'pheobe',31,'NY'")
     sql("insert into maintable select 'rachel',32,'NY'")
     val df1 = sql("select distinct(age) from maintable")
     val df2 = sql("select sum(age) from maintable")
     val df3 = sql("select count(age) from maintable")
-    val analyzed1 = df1.queryExecution.analyzed
-    val analyzed2 = df2.queryExecution.analyzed
-    val analyzed3 = df3.queryExecution.analyzed
     checkAnswer(df1, Seq(Row(31), Row(32)))
     checkAnswer(df2, Seq(Row(63)))
     checkAnswer(df3, Seq(Row(2)))
-    assert(TestUtil.verifyMVDataMap(analyzed1, "single_mv"))
-    assert(TestUtil.verifyMVDataMap(analyzed2, "single_mv"))
-    assert(TestUtil.verifyMVDataMap(analyzed3, "single_mv"))
+    assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "single_mv"))
+    assert(TestUtil.verifyMVDataMap(df2.queryExecution.optimizedPlan, "single_mv"))
+    assert(TestUtil.verifyMVDataMap(df3.queryExecution.optimizedPlan, "single_mv"))
   }
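
    single_mv stores only the raw age column, so distinct, sum and count are
    all computed on top of the MV scan rather than read back precomputed.
    Under that reading the three queries reduce to scans of one narrow table
    (table and column names again illustrative):

        // Assumed shape of the rewrites over the materialized projection:
        spark.sql("SELECT DISTINCT age FROM single_mv_table")
        spark.sql("SELECT sum(age)   FROM single_mv_table")
        spark.sql("SELECT count(age) FROM single_mv_table")
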
 
   test("count test case") {
 
     sql("drop table if exists mvtable1")
-    sql("create table mvtable1(name string,age int,salary int) stored by 'carbondata'")
+    sql("create table mvtable1(name string,age int,salary int) STORED AS carbondata")
     sql("create datamap MV11 using 'mv' as select name from mvtable1")
     sql("insert into mvtable1 select 'n1',12,12")
     sql("rebuild datamap MV11")
     val frame = sql("select count(*) from mvtable1")
-    assert(!TestUtil.verifyMVDataMap(frame.queryExecution.analyzed, "MV11"))
+    assert(!TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "MV11"))
     checkAnswer(frame,Seq(Row(1)))
     sql("drop table if exists mvtable1")
   }
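
    The count test also shows the refresh flow the suite relies on: after a
    new load, REBUILD DATAMAP brings the materialized table up to date (note
    the hunk above deliberately asserts that count(*) bypasses MV11). The
    syntax is taken straight from the hunks above:

        // Typical load-then-refresh sequence for an MV datamap.
        spark.sql("INSERT INTO mvtable1 SELECT 'n2', 13, 13")
        spark.sql("REBUILD DATAMAP MV11")
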
 
   test("test mv with duplicate columns in query and constant column") {
     sql("drop table if exists maintable")
-    sql("create table maintable(name string, age int, add string) stored by 'carbondata'")
+    sql("create table maintable(name string, age int, add string) STORED AS carbondata")
     sql("create datamap dupli_mv using 'mv' as select name, sum(age),sum(age) from maintable group by name")
     sql("create datamap dupli_projection using 'mv' as select age, age,add from maintable")
     sql("create datamap constant_mv using 'mv' as select name, sum(1) ex1 from maintable group by name")
@@ -1111,35 +1036,28 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val df4 = sql("select sum(1) ex1 from maintable group by name")
     val df5 = sql("select age,age,add from maintable")
     val df6 = sql("select age,add from maintable")
-    val analyzed1 = df1.queryExecution.analyzed
-    val analyzed2 = df2.queryExecution.analyzed
-    val analyzed3 = df3.queryExecution.analyzed
-    val analyzed4 = df4.queryExecution.analyzed
-    val analyzed5 = df5.queryExecution.analyzed
-    val analyzed6 = df6.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "dupli_mv"))
-    assert(TestUtil.verifyMVDataMap(analyzed2, "dupli_mv"))
-    assert(TestUtil.verifyMVDataMap(analyzed3, "constant_mv"))
-    assert(TestUtil.verifyMVDataMap(analyzed4, "constant_mv"))
-    assert(TestUtil.verifyMVDataMap(analyzed5, "dupli_projection"))
-    assert(TestUtil.verifyMVDataMap(analyzed6, "dupli_projection"))
+    assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "dupli_mv"))
+    assert(TestUtil.verifyMVDataMap(df2.queryExecution.optimizedPlan, "dupli_mv"))
+    assert(TestUtil.verifyMVDataMap(df3.queryExecution.optimizedPlan, "constant_mv"))
+    assert(TestUtil.verifyMVDataMap(df4.queryExecution.optimizedPlan, "constant_mv"))
+    assert(TestUtil.verifyMVDataMap(df5.queryExecution.optimizedPlan, "dupli_projection"))
+    assert(TestUtil.verifyMVDataMap(df6.queryExecution.optimizedPlan, "dupli_projection"))
   }
 
   test("test mv query when the column names and table name same in join scenario") {
     sql("drop table IF EXISTS price")
     sql("drop table IF EXISTS quality")
-    sql("create table price(product string,price int) stored by 'carbondata'")
-    sql("create table quality(product string,quality string) stored by 'carbondata'")
+    sql("create table price(product string,price int) STORED AS carbondata")
+    sql("create table quality(product string,quality string) STORED AS carbondata")
     sql("create datamap same_mv using 'mv' as select price.product,price.price,quality.product,quality.quality from price,quality where price.product = quality.product")
     val df1 = sql("select price.product from price,quality where price.product = quality.product")
-    val analyzed1 = df1.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "same_mv"))
+    assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "same_mv"))
   }
 
   test("test datamap column having more than 128 characters") {
     sql("drop table IF EXISTS maintable")
     sql("create table maintable (m_month smallint, c_code string, " +
-        "c_country smallint, d_dollar_value double, q_quantity double, u_unit smallint, b_country smallint, i_id int, y_year smallint) stored by 'carbondata'")
+        "c_country smallint, d_dollar_value double, q_quantity double, u_unit smallint, b_country smallint, i_id int, y_year smallint) STORED AS carbondata")
     sql("insert into maintable select 10, 'xxx', 123, 456, 45, 5, 23, 1, 2000")
     sql("drop datamap if exists da_agg")
     sql("create datamap da_agg using 'mv' as select u_unit, y_year, m_month, c_country, b_country, sum(case when i_id=1 and (y_year=2000 and m_month=10)" +
@@ -1148,15 +1066,14 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val df = sql("select u_unit, y_year, m_month, c_country, b_country, sum(case when i_id=1 and (y_year=2000 and m_month=10) then d_dollar_value else 0 end), " +
                  "sum(case when i_id=1 and (y_year=2000 and m_month=10) then q_quantity else 0 end) ex, sum(case when i_id=1 and (y_year=2011 and (m_month>=7 and m_month " +
                  "<=12)) then q_quantity else 0 end) from maintable group by u_unit,y_year, m_month, c_country, b_country")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "da_agg"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "da_agg"))
     sql("drop table IF EXISTS maintable")
   }
 
   test("test cast expression with mv") {
     sql("drop table IF EXISTS maintable")
     sql("create table maintable (m_month bigint, c_code string, " +
-        "c_country smallint, d_dollar_value double, q_quantity double, u_unit smallint, b_country smallint, i_id int, y_year smallint) stored by 'carbondata'")
+        "c_country smallint, d_dollar_value double, q_quantity double, u_unit smallint, b_country smallint, i_id int, y_year smallint) STORED AS carbondata")
     sql("insert into maintable select 10, 'xxx', 123, 456, 45, 5, 23, 1, 2000")
     sql("drop datamap if exists da_cast")
     sql(
@@ -1165,14 +1082,13 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
       " select cast(floor((m_month +1000) / 900) * 900 - 2000 AS INT) as a ,c_code as abc  from maintable")
     val df2 = sql(
       " select cast(floor((m_month +1000) / 900) * 900 - 2000 AS INT),c_code as abc  from maintable")
-    val analyzed1 = df1.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "da_cast"))
+    assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "da_cast"))
   }
 
   test("test cast of expression with mv") {
     sql("drop table IF EXISTS maintable")
     sql("create table maintable (m_month bigint, c_code string, " +
-        "c_country smallint, d_dollar_value double, q_quantity double, u_unit smallint, b_country smallint, i_id int, y_year smallint) stored by 'carbondata'")
+        "c_country smallint, d_dollar_value double, q_quantity double, u_unit smallint, b_country smallint, i_id int, y_year smallint) STORED AS carbondata")
     sql("insert into maintable select 10, 'xxx', 123, 456, 45, 5, 23, 1, 2000")
     sql("drop datamap if exists da_cast")
     sql(
@@ -1181,50 +1097,45 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
       " select cast(floor((m_month +1000) / 900) * 900 - 2000 AS INT) as a ,c_code as abc  from maintable")
     val df2 = sql(
       " select cast(floor((m_month +1000) / 900) * 900 - 2000 AS INT),c_code as abc  from maintable")
-    val analyzed1 = df1.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "da_cast"))
+    assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "da_cast"))
   }
 
   test("test cast with & without alias") {
     sql("drop table IF EXISTS maintable")
     sql("create table maintable (m_month bigint, c_code string, " +
-        "c_country smallint, d_dollar_value double, q_quantity double, u_unit smallint, b_country smallint, i_id int, y_year smallint) stored by 'carbondata'")
+        "c_country smallint, d_dollar_value double, q_quantity double, u_unit smallint, b_country smallint, i_id int, y_year smallint) STORED AS carbondata")
     sql("insert into maintable select 10, 'xxx', 123, 456, 45, 5, 23, 1, 2000")
     sql("drop datamap if exists da_cast")
     sql(
       "create datamap da_cast using 'mv' as select cast(m_month + 1000 AS INT) as a, c_code as abc from maintable")
     checkAnswer(sql("select cast(m_month + 1000 AS INT) as a, c_code as abc from maintable"), Seq(Row(1010, "xxx")))
     var df1 = sql("select cast(m_month + 1000 AS INT) as a, c_code as abc from maintable")
-    var analyzed1 = df1.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "da_cast"))
+    assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "da_cast"))
     sql("drop datamap if exists da_cast")
     sql(
       "create datamap da_cast using 'mv' as select cast(m_month + 1000 AS INT), c_code from maintable")
     df1 = sql("select cast(m_month + 1000 AS INT), c_code from maintable")
-    analyzed1 = df1.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "da_cast"))
+    assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "da_cast"))
     checkAnswer(sql("select cast(m_month + 1000 AS INT), c_code from maintable"), Seq(Row(1010, "xxx")))
   }
 
   test("test mv with floor & ceil exp") {
     sql("drop table IF EXISTS maintable")
     sql("create table maintable (m_month bigint, c_code string, " +
-        "c_country smallint, d_dollar_value double, q_quantity double, u_unit smallint, b_country smallint, i_id int, y_year smallint) stored by 'carbondata'")
+        "c_country smallint, d_dollar_value double, q_quantity double, u_unit smallint, b_country smallint, i_id int, y_year smallint) STORED AS carbondata")
     sql("insert into maintable select 10, 'xxx', 123, 456, 45, 5, 23, 1, 2000")
     sql("drop datamap if exists da_floor")
     sql(
       "create datamap da_floor using 'mv' as select floor(m_month) as a, c_code as abc from maintable")
     checkAnswer(sql("select floor(m_month) as a, c_code as abc from maintable"), Seq(Row(10, "xxx")))
-    var df1 = sql("select floor(m_month) as a, c_code as abc from maintable")
-    var analyzed1 = df1.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "da_floor"))
+    val df1 = sql("select floor(m_month) as a, c_code as abc from maintable")
+    assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "da_floor"))
     sql("drop datamap if exists da_ceil")
     sql(
       "create datamap da_ceil using 'mv' as select ceil(m_month) as a, c_code as abc from maintable")
     checkAnswer(sql("select ceil(m_month) as a, c_code as abc from maintable"), Seq(Row(10, "xxx")))
-    var df2 = sql("select ceil(m_month) as a, c_code as abc from maintable")
-    var analyzed2 = df2.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed2, "da_ceil"))
+    val df2 = sql("select ceil(m_month) as a, c_code as abc from maintable")
+    assert(TestUtil.verifyMVDataMap(df2.queryExecution.optimizedPlan, "da_ceil"))
   }
 
   def drop(): Unit = {
@@ -1251,7 +1162,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
         |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
         |  utilization int,salary int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table_addseg OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
 
@@ -1261,15 +1172,14 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
         |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
         |  utilization int,salary int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table_addseg1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
 
     sql("drop datamap if exists datamap_addseg")
     sql("create datamap datamap_addseg using 'mv' as select empname, designation from fact_table_addseg")
     val df = sql("select empname,designation from fact_table_addseg")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_addseg"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_addseg"))
     assert(df.collect().length == 90)
     val table = CarbonEnv.getCarbonTable(None, "fact_table_addseg1") (sqlContext.sparkSession)
     val path = CarbonTablePath.getSegmentPath(table.getTablePath, "0")
@@ -1279,8 +1189,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"alter table fact_table_addseg add segment options('path'='$newPath', 'format'='carbon')").show()
     sql("select empname,designation from fact_table_addseg").show()
     val df1 = sql("select empname,designation from fact_table_addseg")
-    val analyzed1 = df1.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "datamap_addseg"))
+    assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "datamap_addseg"))
     assert(df1.collect().length == 180)
     sql(s"drop datamap datamap_addseg")
     FileFactory.deleteAllFilesOfDir(new File(newPath))
@@ -1297,7 +1206,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
         |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
         |  utilization int,salary int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table_addseg OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
 
@@ -1307,7 +1216,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
         |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
         |  utilization int,salary int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE fact_table_addseg1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
 
@@ -1315,8 +1224,7 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap datamap_addseg using 'mv' WITH DEFERRED REBUILD as select empname, designation from fact_table_addseg")
     sql("rebuild datamap datamap_addseg")
     val df = sql("select empname,designation from fact_table_addseg")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_addseg"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_addseg"))
     assert(df.collect().length == 90)
     val table = CarbonEnv.getCarbonTable(None, "fact_table_addseg1") (sqlContext.sparkSession)
     val path = CarbonTablePath.getSegmentPath(table.getTablePath, "0")
@@ -1325,15 +1233,13 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
 
     sql(s"alter table fact_table_addseg add segment options('path'='$newPath', 'format'='carbon')").show()
     val df1 = sql("select empname,designation from fact_table_addseg")
-    val analyzed1 = df1.queryExecution.analyzed
-    assert(!TestUtil.verifyMVDataMap(analyzed1, "datamap_addseg"))
+    assert(!TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "datamap_addseg"))
     assert(df1.collect().length == 180)
 
     sql("rebuild datamap datamap_addseg")
 
     val df2 = sql("select empname,designation from fact_table_addseg")
-    val analyzed2 = df2.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed2, "datamap_addseg"))
+    assert(TestUtil.verifyMVDataMap(df2.queryExecution.optimizedPlan, "datamap_addseg"))
     assert(df2.collect().length == 180)
 
     sql(s"drop datamap datamap_addseg")
@@ -1346,14 +1252,14 @@ class MVCreateTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql("drop table if exists t1")
     sql("drop table if exists t2")
     sql("drop datamap if exists mv1")
-    sql("create table t1(userId string,score int) stored by 'carbondata'")
-    sql("create table t2(userId string,age int,sex string) stored by 'carbondata'")
+    sql("create table t1(userId string,score int) STORED AS carbondata")
+    sql("create table t2(userId string,age int,sex string) STORED AS carbondata")
     sql("insert into t1 values(1,100),(2,500)")
     sql("insert into t2 values(1,20,'f'),(2,30,'m')")
     val result  = sql("select avg(t1.score),t2.age,t2.sex from t1 join t2 on t1.userId=t2.userId group by t2.age,t2.sex")
     sql("create datamap mv1 using 'mv' as select avg(t1.score),t2.age,t2.sex from t1 join t2 on t1.userId=t2.userId group by t2.age,t2.sex")
     val df = sql("select avg(t1.score),t2.age,t2.sex from t1 join t2 on t1.userId=t2.userId group by t2.age,t2.sex")
-    TestUtil.verifyMVDataMap(df.queryExecution.analyzed, "mv1")
+    TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "mv1")
     checkAnswer(df, result)
     intercept[ProcessMetaDataException] {
       sql("alter table t1 drop columns(userId)")
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVExceptionTestCase.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVExceptionTestCase.scala
index 397296d..702dafe 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVExceptionTestCase.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVExceptionTestCase.scala
@@ -18,13 +18,13 @@ package org.apache.carbondata.mv.rewrite
 
 import org.apache.carbondata.common.exceptions.sql.{MalformedCarbonCommandException, MalformedDataMapCommandException}
 import org.apache.spark.sql.catalyst.analysis.NoSuchTableException
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-class MVExceptionTestCase  extends CarbonQueryTest with BeforeAndAfterAll {
+class MVExceptionTestCase extends QueryTest with BeforeAndAfterAll {
   override def beforeAll: Unit = {
     drop()
-    sql("create table main_table (name string,age int,height int) stored by 'carbondata'")
+    sql("create table main_table (name string,age int,height int) STORED AS carbondata")
   }
 
   test("test mv no base table") {
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVFilterAndJoinTest.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVFilterAndJoinTest.scala
index e7a6acc..00e6336 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVFilterAndJoinTest.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVFilterAndJoinTest.scala
@@ -17,16 +17,16 @@
 package org.apache.carbondata.mv.rewrite
 
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-class MVFilterAndJoinTest extends CarbonQueryTest with BeforeAndAfterAll {
+class MVFilterAndJoinTest extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll(): Unit = {
     drop
-    sql("create table main_table (name string,age int,height int) stored by 'carbondata'")
-    sql("create table dim_table (name string,age int,height int) stored by 'carbondata'")
-    sql("create table sdr_table (name varchar(20),score int) stored by 'carbondata'")
+    sql("create table main_table (name string,age int,height int) STORED AS carbondata")
+    sql("create table dim_table (name string,age int,height int) STORED AS carbondata")
+    sql("create table sdr_table (name varchar(20),score int) STORED AS carbondata")
   }
 
   def drop() {
@@ -43,7 +43,7 @@ class MVFilterAndJoinTest extends CarbonQueryTest with BeforeAndAfterAll {
     sql("insert into main_table select 'lily',30,160")
     sql("create datamap main_table_mv on table main_table using 'mv' as select sum(age),name from main_table group by name")
     sql("rebuild datamap main_table_mv")
-    assert(TestUtil.verifyMVDataMap(sql(querySQL).queryExecution.analyzed, "main_table_mv"))
+    assert(TestUtil.verifyMVDataMap(sql(querySQL).queryExecution.optimizedPlan, "main_table_mv"))
     checkAnswer(sql(querySQL), Seq(Row(20,"tom")))
   }
 
@@ -59,7 +59,7 @@ class MVFilterAndJoinTest extends CarbonQueryTest with BeforeAndAfterAll {
     sql("insert into sdr_table select 'tom',50")
     sql("insert into sdr_table select 'lily',80")
     sql("rebuild datamap main_table_mv1")
-    assert(TestUtil.verifyMVDataMap(sql(querySQL).queryExecution.analyzed, "main_table_mv1"))
+    assert(TestUtil.verifyMVDataMap(sql(querySQL).queryExecution.optimizedPlan, "main_table_mv1"))
     checkAnswer(sql(querySQL), Seq(Row(120,"tom")))
   }
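
A pattern these tests rely on: an MV created WITH DEFERRED REBUILD (or a lazy MV after fresh loads) does not answer queries until "rebuild datamap" runs, so the optimized-plan assertions flip between miss and hit around the rebuild. Schematically, with illustrative table and datamap names:

    sql("create datamap m using 'mv' with deferred rebuild as " +
        "select name, sum(age) from t group by name")
    sql("insert into t select 'tom', 25")  // MV stale: query falls back to t
    sql("rebuild datamap m")               // MV covers the loaded segments again
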
 
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVIncrementalLoadingTestcase.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVIncrementalLoadingTestcase.scala
index 6daaf4b..af0fd96 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVIncrementalLoadingTestcase.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVIncrementalLoadingTestcase.scala
@@ -17,10 +17,8 @@
 
 package org.apache.carbondata.mv.rewrite
 
-import org.apache.spark.sql.Row
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.execution.datasources.LogicalRelation
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.{CarbonEnv, Row}
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
@@ -33,7 +31,7 @@ import org.apache.carbondata.core.statusmanager.{SegmentStatus, SegmentStatusMan
  * Test Class to verify Incremental Load on  MV Datamap
  */
 
-class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAll {
+class MVIncrementalLoadingTestcase extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll(): Unit = {
     sql("drop table IF EXISTS test_table")
@@ -60,8 +58,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
       "from test_table")
     val query: String = "select empname from test_table"
     val df1 = sql(s"$query")
-    val analyzed1 = df1.queryExecution.analyzed
-    assert(!TestUtil.verifyMVDataMap(analyzed1, "datamap1"))
+    assert(!TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "datamap1"))
     sql(s"rebuild datamap datamap1")
     val dataMapTable = CarbonMetadata.getInstance().getCarbonTable(
       CarbonCommonConstants.DATABASE_DEFAULT_NAME,
@@ -73,8 +70,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
     segmentList.add("0")
     assert(segmentList.containsAll( segmentMap.get("default.test_table")))
     val df2 = sql(s"$query")
-    val analyzed2 = df2.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed2, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df2.queryExecution.optimizedPlan, "datamap1"))
     loadDataToFactTable("test_table")
     loadDataToFactTable("test_table1")
     sql(s"rebuild datamap datamap1")
@@ -86,13 +82,11 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
     checkAnswer(sql("select empname, designation from test_table"),
       sql("select empname, designation from test_table1"))
     val df3 = sql(s"$query")
-    val analyzed3 = df3.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed3, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df3.queryExecution.optimizedPlan, "datamap1"))
     loadDataToFactTable("test_table")
     loadDataToFactTable("test_table1")
     val df4 = sql(s"$query")
-    val analyzed4 = df4.queryExecution.analyzed
-    assert(!TestUtil.verifyMVDataMap(analyzed4, "datamap1"))
+    assert(!TestUtil.verifyMVDataMap(df4.queryExecution.optimizedPlan, "datamap1"))
     checkAnswer(sql("select empname, designation from test_table"),
       sql("select empname, designation from test_table1"))
   }
@@ -130,10 +124,10 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
   test("test MV incremental loading with update operation on main table") {
     sql("drop table IF EXISTS main_table")
     sql("drop table IF EXISTS testtable")
-    sql("create table main_table(a string,b string,c int) stored by 'carbondata'")
+    sql("create table main_table(a string,b string,c int) STORED AS carbondata")
     sql("insert into main_table values('a','abc',1)")
     sql("insert into main_table values('b','bcd',2)")
-    sql("create table testtable(a string,b string,c int) stored by 'carbondata'")
+    sql("create table testtable(a string,b string,c int) STORED AS carbondata")
     sql("insert into testtable values('a','abc',1)")
     sql("insert into testtable values('b','bcd',2)")
     sql("drop datamap if exists datamap1")
@@ -141,16 +135,14 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
     sql(s"rebuild datamap datamap1")
     var df = sql(
       s"""select a, sum(b) from main_table group by a""".stripMargin)
-    var analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap1"))
     checkAnswer(sql(" select a, sum(b) from testtable group by a"),
       sql(" select a, sum(b) from main_table group by a"))
     sql("update main_table set(a) = ('aaa') where b = 'abc'").show(false)
     sql("update testtable set(a) = ('aaa') where b = 'abc'").show(false)
-    val dataMapTable = CarbonMetadata.getInstance().getCarbonTable(
-      CarbonCommonConstants.DATABASE_DEFAULT_NAME,
-      "datamap1_table"
-    )
+    val dataMapTable = CarbonEnv.getCarbonTable(
+      Option(CarbonCommonConstants.DATABASE_DEFAULT_NAME),
+      "datamap1_table")(sqlContext.sparkSession)
     var loadMetadataDetails = SegmentStatusManager.readLoadMetadata(dataMapTable.getMetadataPath)
     assert(loadMetadataDetails(0).getSegmentStatus == SegmentStatus.MARKED_FOR_DELETE)
     checkAnswer(sql("select * from main_table"), sql("select * from testtable"))
@@ -162,8 +154,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
     segmentList.add("1")
     assert(segmentList.containsAll( segmentMap.get("default.main_table")))
     df = sql(s""" select a, sum(b) from main_table group by a""".stripMargin)
-    analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap1"))
     checkAnswer(sql(" select a, sum(b) from testtable group by a"),
       sql(" select a, sum(b) from main_table group by a"))
     sql("drop table IF EXISTS main_table")
@@ -239,7 +230,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
 
   test("test insert overwrite") {
     sql("drop table IF EXISTS test_table")
-    sql("create table test_table(a string,b string,c int) stored by 'carbondata'")
+    sql("create table test_table(a string,b string,c int) STORED AS carbondata")
     sql("insert into test_table values('a','abc',1)")
     sql("insert into test_table values('b','bcd',2)")
     sql("drop datamap if exists datamap1")
@@ -248,9 +239,9 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
     sql(s"rebuild datamap datamap1")
     checkAnswer(sql(" select a, sum(b) from test_table  group by a"), Seq(Row("a", null), Row("b", null)))
     sql("insert overwrite table test_table select 'd','abc',3")
-    val dataMapTable = CarbonMetadata.getInstance().getCarbonTable(
-      CarbonCommonConstants.DATABASE_DEFAULT_NAME,
-      "datamap1_table")
+    val dataMapTable = CarbonEnv.getCarbonTable(
+      Option(CarbonCommonConstants.DATABASE_DEFAULT_NAME),
+      "datamap1_table")(sqlContext.sparkSession)
     var loadMetadataDetails = SegmentStatusManager.readLoadMetadata(dataMapTable.getMetadataPath)
     assert(loadMetadataDetails(0).getSegmentStatus == SegmentStatus.MARKED_FOR_DELETE)
     checkAnswer(sql(" select a, sum(b) from test_table  group by a"), Seq(Row("d", null)))
@@ -265,19 +256,19 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
 
   test("test inner join with mv") {
     sql("drop table if exists products")
-    sql("create table products (product string, amount int) stored by 'carbondata' ")
+    sql("create table products (product string, amount int) STORED AS carbondata ")
     sql(s"load data INPATH '$resourcesPath/products.csv' into table products")
     sql("drop table if exists sales")
-    sql("create table sales (product string, quantity int) stored by 'carbondata'")
+    sql("create table sales (product string, quantity int) STORED AS carbondata")
     sql(s"load data INPATH '$resourcesPath/sales_data.csv' into table sales")
     sql("drop datamap if exists innerjoin")
     sql("Create datamap innerjoin using 'mv'  with deferred rebuild as Select p.product, p.amount, s.quantity, s.product from " +
         "products p, sales s where p.product=s.product")
     sql("drop table if exists products1")
-    sql("create table products1 (product string, amount int) stored by 'carbondata' ")
+    sql("create table products1 (product string, amount int) STORED AS carbondata ")
     sql(s"load data INPATH '$resourcesPath/products.csv' into table products1")
     sql("drop table if exists sales1")
-    sql("create table sales1 (product string, quantity int) stored by 'carbondata'")
+    sql("create table sales1 (product string, quantity int) STORED AS carbondata")
     sql(s"load data INPATH '$resourcesPath/sales_data.csv' into table sales1")
     sql(s"rebuild datamap innerjoin")
     checkAnswer(sql("Select p.product, p.amount, s.quantity from products1 p, sales1 s where p.product=s.product"),
@@ -296,10 +287,10 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
   test("test set segments with main table having mv datamap") {
     sql("drop table IF EXISTS main_table")
     sql("drop table IF EXISTS test_table")
-    sql("create table main_table(a string,b string,c int) stored by 'carbondata'")
+    sql("create table main_table(a string,b string,c int) STORED AS carbondata")
     sql("insert into main_table values('a','abc',1)")
     sql("insert into main_table values('b','bcd',2)")
-    sql("create table test_table(a string,b string,c int) stored by 'carbondata'")
+    sql("create table test_table(a string,b string,c int) STORED AS carbondata")
     sql("insert into test_table values('a','abc',1)")
     sql("insert into test_table values('b','bcd',2)")
     sql("drop datamap if exists datamap_mt")
@@ -319,7 +310,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
 
   test("test set segments with main table having mv datamap before rebuild") {
     sql("drop table IF EXISTS main_table")
-    sql("create table main_table(a string,b string,c int) stored by 'carbondata'")
+    sql("create table main_table(a string,b string,c int) STORED AS carbondata")
     sql("insert into main_table values('a','abc',1)")
     sql("insert into main_table values('b','bcd',2)")
     sql("drop datamap if exists datamap1")
@@ -327,19 +318,17 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
     sql("SET carbon.input.segments.default.main_table=1")
     sql(s"rebuild datamap datamap1")
     val df = sql("select a, sum(c) from main_table  group by a")
-    val analyzed = df.queryExecution.analyzed
-    assert(!TestUtil.verifyMVDataMap(analyzed, "datamap1"))
+    assert(!TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap1"))
     sql("reset")
     checkAnswer(sql("select a, sum(c) from main_table  group by a"), Seq(Row("a", 1), Row("b", 2)))
     val df1= sql("select a, sum(c) from main_table  group by a")
-    val analyzed1 = df1.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "datamap1"))
     sql("drop table IF EXISTS main_table")
   }
 
   test("test datamap table after datamap table compaction- custom") {
     sql("drop table IF EXISTS main_table")
-    sql("create table main_table(a string,b string,c int) stored by 'carbondata'")
+    sql("create table main_table(a string,b string,c int) STORED AS carbondata")
     sql("insert into main_table values('a','abc',1)")
     sql("insert into main_table values('b','bcd',2)")
     sql("drop datamap if exists datamap1")
@@ -368,7 +357,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
   test("test sum(a) + sum(b)") {
     // Full rebuild will happen in this case
     sql("drop table IF EXISTS main_table")
-    sql("create table main_table(a int,b int,c int) stored by 'carbondata'")
+    sql("create table main_table(a int,b int,c int) STORED AS carbondata")
     sql("insert into main_table values(1,2,3)")
     sql("insert into main_table values(1,4,5)")
     sql("drop datamap if exists datamap_1")
@@ -406,8 +395,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
     segmentList.add("0")
     assert(segmentList.containsAll( segmentMap.get("default.test_table")))
     val df2 = sql(s"$query")
-    val analyzed2 = df2.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed2, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df2.queryExecution.optimizedPlan, "datamap1"))
     loadDataToFactTable("test_table")
     loadDataToFactTable("test_table1")
     loadMetadataDetails = SegmentStatusManager.readLoadMetadata(dataMapTable.getMetadataPath)
@@ -418,13 +406,11 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
     checkAnswer(sql("select empname, designation from test_table"),
       sql("select empname, designation from test_table1"))
     val df3 = sql(s"$query")
-    val analyzed3 = df3.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed3, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df3.queryExecution.optimizedPlan, "datamap1"))
     loadDataToFactTable("test_table")
     loadDataToFactTable("test_table1")
     val df4 = sql(s"$query")
-    val analyzed4 = df4.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed4, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df4.queryExecution.optimizedPlan, "datamap1"))
     checkAnswer(sql("select empname, designation from test_table"),
       sql("select empname, designation from test_table1"))
   }
@@ -432,17 +418,16 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
   test("test MV incremental loading on non-lazy datamap with update operation on main table") {
     sql("drop table IF EXISTS main_table")
     sql("drop table IF EXISTS testtable")
-    sql("create table main_table(a string,b string,c int) stored by 'carbondata'")
+    sql("create table main_table(a string,b string,c int) STORED AS carbondata")
     sql("insert into main_table values('a','abc',1)")
     sql("insert into main_table values('b','bcd',2)")
-    sql("create table testtable(a string,b string,c int) stored by 'carbondata'")
+    sql("create table testtable(a string,b string,c int) STORED AS carbondata")
     sql("insert into testtable values('a','abc',1)")
     sql("insert into testtable values('b','bcd',2)")
     sql("drop datamap if exists datamap1")
     sql("create datamap datamap1 using 'mv' as select a, sum(b) from main_table group by a")
     var df = sql(s"""select a, sum(b) from main_table group by a""".stripMargin)
-    var analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap1"))
     checkAnswer(sql(" select a, sum(b) from testtable group by a"),
       sql(" select a, sum(b) from main_table group by a"))
     sql("update main_table set(a) = ('aaa') where b = 'abc'").show(false)
@@ -458,8 +443,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
     segmentList.add("1")
     assert(segmentList.containsAll(segmentMap.get("default.main_table")))
     df = sql(s""" select a, sum(b) from main_table group by a""".stripMargin)
-    analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap1"))
     checkAnswer(sql(" select a, sum(b) from testtable group by a"),
       sql(" select a, sum(b) from main_table group by a"))
     sql("drop table IF EXISTS main_table")
@@ -469,17 +453,16 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
   test("test MV incremental loading on non-lazy datamap with delete operation on main table") {
     sql("drop table IF EXISTS main_table")
     sql("drop table IF EXISTS testtable")
-    sql("create table main_table(a string,b string,c int) stored by 'carbondata'")
+    sql("create table main_table(a string,b string,c int) STORED AS carbondata")
     sql("insert into main_table values('a','abc',1)")
     sql("insert into main_table values('b','bcd',2)")
-    sql("create table testtable(a string,b string,c int) stored by 'carbondata'")
+    sql("create table testtable(a string,b string,c int) STORED AS carbondata")
     sql("insert into testtable values('a','abc',1)")
     sql("insert into testtable values('b','bcd',2)")
     sql("drop datamap if exists datamap1")
     sql("create datamap datamap1 using 'mv' as select a, sum(b) from main_table group by a")
     var df = sql(s"""select a, sum(b) from main_table group by a""".stripMargin)
-    var analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap1"))
     checkAnswer(sql(" select a, sum(b) from testtable group by a"),
       sql(" select a, sum(b) from main_table group by a"))
     sql("delete from  main_table  where b = 'abc'").show(false)
@@ -495,8 +478,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
     segmentList.add("1")
     assert(segmentList.containsAll(segmentMap.get("default.main_table")))
     df = sql(s""" select a, sum(b) from main_table group by a""".stripMargin)
-    analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap1"))
     checkAnswer(sql(" select a, sum(b) from testtable group by a"),
       sql(" select a, sum(b) from main_table group by a"))
     sql("drop table IF EXISTS main_table")
@@ -505,7 +487,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
 
   test("test whether datamap table is compacted after main table compaction") {
     sql("drop table IF EXISTS main_table")
-    sql("create table main_table(a string,b string,c int) stored by 'carbondata'")
+    sql("create table main_table(a string,b string,c int) STORED AS carbondata")
     sql("insert into main_table values('a','abc',1)")
     sql("insert into main_table values('b','bcd',2)")
     sql("drop datamap if exists datamap1")
@@ -520,7 +502,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
 
   test("test delete record when table contains single segment") {
     sql("drop table IF EXISTS main_table")
-    sql("create table main_table(a string,b string,c int) stored by 'carbondata'")
+    sql("create table main_table(a string,b string,c int) STORED AS carbondata")
     sql("insert into main_table values('a','abc',1)")
     sql("drop datamap if exists datamap1")
     sql("create datamap datamap1 using 'mv' as select a, sum(b) from main_table group by a")
@@ -535,13 +517,13 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
 
   test("set segments on datamap table") {
     sql("drop table IF EXISTS main_table")
-    sql("create table main_table(a string,b string,c int) stored by 'carbondata'")
+    sql("create table main_table(a string,b string,c int) STORED AS carbondata")
     sql("insert into main_table values('a','abc',1)")
     sql("drop datamap if exists datamap1")
     sql("create datamap datamap1 using 'mv' as select a,b from main_table")
     sql("insert into main_table values('b','abcd',1)")
     sql("SET carbon.input.segments.default.datamap1_table=0")
-    assert(sql("select a,b from main_table").count() == 1)
+    assert(sql("select a,b from main_table").collect().length == 1)
     sql("drop table IF EXISTS main_table")
   }
 
@@ -576,7 +558,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
          |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
          |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
          |  utilization int,salary int)
-         | STORED BY 'org.apache.carbondata.format' TBLPROPERTIES('AUTO_LOAD_MERGE'='true','COMPACTION_LEVEL_THRESHOLD'='6,0')
+         | STORED AS carbondata TBLPROPERTIES('AUTO_LOAD_MERGE'='true','COMPACTION_LEVEL_THRESHOLD'='6,0')
       """.stripMargin)
     loadDataToFactTable("test_table")
     sql("drop datamap if exists datamap1")
@@ -593,8 +575,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
     val result = sql("show datamap on table test_table").collectAsList()
     assert(result.get(0).get(5).toString.contains("\"default.test_table\":\"12.1\""))
     val df = sql(s""" select empname, designation from test_table""".stripMargin)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_com"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_com"))
   }
 
   test("test all aggregate functions") {
@@ -615,8 +596,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
       "skewness(workgroupcategory),kurtosis(workgroupcategory),covar_pop(projectcode," +
       "workgroupcategory),covar_samp(projectcode,workgroupcategory),projectjoindate from " +
       "test_table group by projectjoindate")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_agg"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_agg"))
     checkAnswer(sql(
       "select variance(workgroupcategory),var_samp(projectcode), var_pop(projectcode), stddev" +
       "(projectcode),stddev_samp(workgroupcategory),corr(projectcode,workgroupcategory)," +
@@ -651,7 +631,7 @@ class MVIncrementalLoadingTestcase extends CarbonQueryTest with BeforeAndAfterAl
          |  workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
          |  projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int,
          |  utilization int,salary int)
-         | STORED BY 'org.apache.carbondata.format'
+         | STORED AS carbondata
       """.stripMargin)
   }
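
Where a test needs the datamap's backing CarbonTable, it now resolves it through the session catalog via CarbonEnv rather than the static CarbonMetadata cache, then reads segment status from the table's metadata path. Condensed from the pattern above:

    val dataMapTable = CarbonEnv.getCarbonTable(
      Option(CarbonCommonConstants.DATABASE_DEFAULT_NAME),
      "datamap1_table")(sqlContext.sparkSession)
    val details = SegmentStatusManager.readLoadMetadata(dataMapTable.getMetadataPath)
    // After an update/overwrite on the main table, the stale MV segment
    // is expected to be marked for delete.
    assert(details(0).getSegmentStatus == SegmentStatus.MARKED_FOR_DELETE)
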
 
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVInvalidTestCase.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVInvalidTestCase.scala
index aad3f89..35af5a5 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVInvalidTestCase.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVInvalidTestCase.scala
@@ -16,14 +16,14 @@
  */
 package org.apache.carbondata.mv.rewrite
 
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-class MVInvalidTestCase  extends CarbonQueryTest with BeforeAndAfterAll {
+class MVInvalidTestCase extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll(): Unit = {
     drop
-    sql("create table main_table (name string,age int,height int) stored by 'carbondata'")
+    sql("create table main_table (name string,age int,height int) STORED AS carbondata")
   }
 
   def drop {
@@ -38,7 +38,7 @@ class MVInvalidTestCase  extends CarbonQueryTest with BeforeAndAfterAll {
     sql("create datamap main_table_mv on table main_table using 'mv' as select age,name,height from main_table where name = 'tom'")
     sql("rebuild datamap main_table_mv")
 
-    assert(!TestUtil.verifyMVDataMap(sql(querySQL).queryExecution.analyzed, "main_table_mv"))
+    assert(!TestUtil.verifyMVDataMap(sql(querySQL).queryExecution.optimizedPlan, "main_table_mv"))
   }
 
   override def afterAll(): Unit = {
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVMultiJoinTestCase.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVMultiJoinTestCase.scala
index 79a6a1c..8c406fb 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVMultiJoinTestCase.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVMultiJoinTestCase.scala
@@ -17,12 +17,10 @@
 package org.apache.carbondata.mv.rewrite
 
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.execution.datasources.LogicalRelation
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-class MVMultiJoinTestCase extends CarbonQueryTest with BeforeAndAfterAll {
+class MVMultiJoinTestCase extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll(){
     drop
@@ -52,7 +50,7 @@ class MVMultiJoinTestCase extends CarbonQueryTest with BeforeAndAfterAll {
         "select p.title,c.title,c.pid,p.aid from areas as p inner join areas as c on " +
         "c.pid=p.aid where p.title = 'hebei'")
     val frame = sql(mvSQL)
-    assert(TestUtil.verifyMVDataMap(frame.queryExecution.analyzed, "table_mv"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "table_mv"))
     checkAnswer(frame, Seq(Row("hebei","shijiazhuang"), Row("hebei","handan")))
   }
 
@@ -73,7 +71,7 @@ class MVMultiJoinTestCase extends CarbonQueryTest with BeforeAndAfterAll {
        """.stripMargin
     sql("create datamap table_mv using 'mv' as " + "select sdr.name,sum(sdr.score),dim.age,dim_other.height,count(dim.name) as c1, count(dim_other.name) as c2 from sdr_table sdr left join dim_table dim on sdr.name = dim.name left join dim_table dim_other on sdr.name = dim_other.name group by sdr.name,dim.age,dim_other.height")
     val frame = sql(mvSQL)
-    assert(TestUtil.verifyMVDataMap(frame.queryExecution.analyzed, "table_mv"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "table_mv"))
     checkAnswer(frame, Seq(Row("lily",80,30,160),Row("tom",120,20,170)))
   }
 
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVRewriteTestCase.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVRewriteTestCase.scala
index 5999bbc..50e2865 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVRewriteTestCase.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVRewriteTestCase.scala
@@ -16,21 +16,18 @@
  */
 package org.apache.carbondata.mv.rewrite
 
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.execution.datasources.LogicalRelation
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-class MVRewriteTestCase extends CarbonQueryTest with BeforeAndAfterAll {
+class MVRewriteTestCase extends QueryTest with BeforeAndAfterAll {
 
 
   override def beforeAll(): Unit = {
     drop
     sql("create table region(l4id string,l4name string) using carbondata")
-    sql(
-      s"""create table data_table(
-         	         |starttime int, seq long,succ long,LAYER4ID string,tmp int)
-         	         |using carbondata""".stripMargin)
+    sql(s"""create table data_table(
+        |starttime int, seq long,succ long,LAYER4ID string,tmp int)
+        |using carbondata""".stripMargin)
   }
 
   def drop(): Unit ={
@@ -41,46 +38,47 @@ class MVRewriteTestCase extends CarbonQueryTest with BeforeAndAfterAll {
   test("test mv count and case when expression") {
     sql("drop datamap if exists data_table_mv")
     sql(s"""create datamap data_table_mv using 'mv' as
-           	           | SELECT STARTTIME,LAYER4ID,
-           	           |  SUM(seq) AS seq_c,
-           	           |  SUM(succ)  AS succ_c
-           	           | FROM data_table
-           	           | GROUP BY STARTTIME,LAYER4ID""".stripMargin)
+        | SELECT STARTTIME,LAYER4ID,
+        |  SUM(seq) AS seq_c,
+        |  SUM(succ)  AS succ_c
+        | FROM data_table
+        | GROUP BY STARTTIME,LAYER4ID""".stripMargin)
 
     sql("rebuild datamap data_table_mv")
 
-    var frame = sql(s"""SELECT  MT.`3600` AS `3600`,
-                       	                       | MT.`2250410101` AS `2250410101`,
-                       	                       | (CASE WHEN (SUM(COALESCE(seq_c, 0))) = 0 THEN NULL
-                       	                       |   ELSE
-                       	                       |   (CASE WHEN (CAST((SUM(COALESCE(seq_c, 0))) AS int)) = 0 THEN 0
-                       	                       |     ELSE ((CAST((SUM(COALESCE(succ_c, 0))) AS double))
-                       	                       |     / (CAST((SUM(COALESCE(seq_c, 0))) AS double)))
-                       	                       |     END) * 100
-                       	                       |   END) AS rate
-                       	                       | FROM (
-                       	                       |   SELECT sum_result.*, H_REGION.`2250410101` FROM
-                       	                       |   (SELECT cast(floor((starttime + 28800) / 3600) * 3600 - 28800 as int) AS `3600`,
-                       	                       |     LAYER4ID,
-                       	                       |     COALESCE(SUM(seq), 0) AS seq_c,
-                       	                       |     COALESCE(SUM(succ), 0) AS succ_c
-                       	                       |       FROM data_table
-                       	                       |       WHERE STARTTIME >= 1549866600 AND STARTTIME < 1549899900
-                       	                       |       GROUP BY cast(floor((STARTTIME + 28800) / 3600) * 3600 - 28800 as int),LAYER4ID
-                       	                       |   )sum_result
-                       	                       |   LEFT JOIN
-                       	                       |   (SELECT l4id AS `225040101`,
-                       	                       |     l4name AS `2250410101`,
-                       	                       |     l4name AS NAME_2250410101
-                       	                       |       FROM region
-                       	                       |       GROUP BY l4id, l4name) H_REGION
-                       	                       |   ON sum_result.LAYER4ID = H_REGION.`225040101`
-                       	                       | WHERE H_REGION.NAME_2250410101 IS NOT NULL
-                       	                       | ) MT
-                       	                       | GROUP BY MT.`3600`, MT.`2250410101`
-                       	                       | ORDER BY `3600` ASC LIMIT 5000""".stripMargin)
+    val frame =
+      sql(s"""SELECT  MT.`3600` AS `3600`,
+          | MT.`2250410101` AS `2250410101`,
+          | (CASE WHEN (SUM(COALESCE(seq_c, 0))) = 0 THEN NULL
+          |   ELSE
+          |   (CASE WHEN (CAST((SUM(COALESCE(seq_c, 0))) AS int)) = 0 THEN 0
+          |     ELSE ((CAST((SUM(COALESCE(succ_c, 0))) AS double))
+          |     / (CAST((SUM(COALESCE(seq_c, 0))) AS double)))
+          |     END) * 100
+          |   END) AS rate
+          | FROM (
+          |   SELECT sum_result.*, H_REGION.`2250410101` FROM
+          |   (SELECT cast(floor((starttime + 28800) / 3600) * 3600 - 28800 as int) AS `3600`,
+          |     LAYER4ID,
+          |     COALESCE(SUM(seq), 0) AS seq_c,
+          |     COALESCE(SUM(succ), 0) AS succ_c
+          |       FROM data_table
+          |       WHERE STARTTIME >= 1549866600 AND STARTTIME < 1549899900
+          |       GROUP BY cast(floor((STARTTIME + 28800) / 3600) * 3600 - 28800 as int),LAYER4ID
+          |   )sum_result
+          |   LEFT JOIN
+          |   (SELECT l4id AS `225040101`,
+          |     l4name AS `2250410101`,
+          |     l4name AS NAME_2250410101
+          |       FROM region
+          |       GROUP BY l4id, l4name) H_REGION
+          |   ON sum_result.LAYER4ID = H_REGION.`225040101`
+          | WHERE H_REGION.NAME_2250410101 IS NOT NULL
+          | ) MT
+          | GROUP BY MT.`3600`, MT.`2250410101`
+          | ORDER BY `3600` ASC LIMIT 5000""".stripMargin)
 
-    assert(TestUtil.verifyMVDataMap(frame.queryExecution.analyzed, "data_table_mv"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "data_table_mv"))
   }
 
   override def afterAll(): Unit = {
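
The reformatted multi-line queries above rely on Scala's stripMargin, which strips each line's leading characters up to and including the first '|'; the removed version had tabs mixed into that margin, bloating the literals without changing the SQL. A minimal standalone example:

    val q =
      s"""SELECT a,
         |       b
         |FROM t""".stripMargin
    // q == "SELECT a,\n       b\nFROM t"
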
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVSampleTestCase.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVSampleTestCase.scala
index 5922750..96a9433 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVSampleTestCase.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVSampleTestCase.scala
@@ -18,16 +18,14 @@ package org.apache.carbondata.mv.rewrite
 
 import java.io.File
 
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.execution.datasources.LogicalRelation
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.mv.rewrite.matching.TestSQLBatch._
 
-class MVSampleTestCase extends CarbonQueryTest with BeforeAndAfterAll {
+class MVSampleTestCase extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll {
     drop()
@@ -58,7 +56,7 @@ class MVSampleTestCase extends CarbonQueryTest with BeforeAndAfterAll {
          |  `qty`     int,
          |  `disc`    string
          |)
-         |STORED BY 'org.apache.carbondata.format'
+         |STORED AS carbondata
         """.stripMargin.trim,
       s"""
          |CREATE TABLE Dim (
@@ -67,14 +65,14 @@ class MVSampleTestCase extends CarbonQueryTest with BeforeAndAfterAll {
          |  `state`   string,
          |  `country` string
          |)
-         |STORED BY 'org.apache.carbondata.format'
+         |STORED AS carbondata
         """.stripMargin.trim,
       s"""
          |CREATE TABLE Item (
          |  `i_item_id`     int,
          |  `i_item_sk`     int
          |)
-         |STORED BY 'org.apache.carbondata.format'
+         |STORED AS carbondata
         """.stripMargin.trim
     )
   }
@@ -84,8 +82,7 @@ class MVSampleTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_sm1")
     sql(s"create datamap datamap_sm1 using 'mv' as ${sampleTestCases(0)._2}")
     val df = sql(sampleTestCases(0)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(!TestUtil.verifyMVDataMap(analyzed, "datamap_sm1"))
+    assert(!TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_sm1"))
     sql(s"drop datamap datamap_sm1")
   }
 
@@ -93,8 +90,7 @@ class MVSampleTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_sm2")
     sql(s"create datamap datamap_sm2 using 'mv' as ${sampleTestCases(2)._2}")
     val df = sql(sampleTestCases(2)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_sm2"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_sm2"))
     sql(s"drop datamap datamap_sm2")
   }
 
@@ -102,8 +98,7 @@ class MVSampleTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_sm3")
     sql(s"create datamap datamap_sm3 using 'mv' as ${sampleTestCases(3)._2}")
     val df = sql(sampleTestCases(3)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_sm3"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_sm3"))
     sql(s"drop datamap datamap_sm3")
   }
 
@@ -111,8 +106,7 @@ class MVSampleTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_sm4")
     sql(s"create datamap datamap_sm4 using 'mv' as ${sampleTestCases(4)._2}")
     val df = sql(sampleTestCases(4)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_sm4"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_sm4"))
     sql(s"drop datamap datamap_sm4")
   }
 
@@ -120,8 +114,7 @@ class MVSampleTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_sm5")
     sql(s"create datamap datamap_sm5 using 'mv' as ${sampleTestCases(5)._2}")
     val df = sql(sampleTestCases(5)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(!TestUtil.verifyMVDataMap(analyzed, "datamap_sm5"))
+    assert(!TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_sm5"))
     sql(s"drop datamap datamap_sm5")
   }
 
@@ -129,8 +122,7 @@ class MVSampleTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_sm6")
     sql(s"create datamap datamap_sm6 using 'mv' as ${sampleTestCases(6)._2}")
     val df = sql(sampleTestCases(6)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_sm6"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_sm6"))
     sql(s"drop datamap datamap_sm6")
   }
 
@@ -138,8 +130,7 @@ class MVSampleTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_sm7")
     sql(s"create datamap datamap_sm7 using 'mv' as ${sampleTestCases(7)._2}")
     val df = sql(sampleTestCases(7)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_sm7"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_sm7"))
     sql(s"drop datamap datamap_sm7")
   }
 
@@ -147,8 +138,7 @@ class MVSampleTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_sm8")
     sql(s"create datamap datamap_sm8 using 'mv' as ${sampleTestCases(8)._2}")
     val df = sql(sampleTestCases(8)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_sm8"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_sm8"))
     sql(s"drop datamap datamap_sm8")
   }
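
These parameterized suites iterate over prebuilt test batches where, judging from the usage above, each entry's second element is the MV definition SQL and the third is the query expected (or not) to be rewritten. Schematically, with the tuple shape assumed from that usage:

    // Assumed shape per entry: (..., mvDefinitionSql, querySql, ...)
    val mvSql = sampleTestCases(2)._2
    val querySql = sampleTestCases(2)._3
    sql(s"create datamap dm using 'mv' as $mvSql")
    assert(TestUtil.verifyMVDataMap(
      sql(querySql).queryExecution.optimizedPlan, "dm"))
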
 
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTPCDSTestCase.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTPCDSTestCase.scala
index 1963e9b..05fa2b9 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTPCDSTestCase.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTPCDSTestCase.scala
@@ -18,9 +18,7 @@ package org.apache.carbondata.mv.rewrite
 
 import java.io.File
 
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.execution.datasources.LogicalRelation
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
@@ -28,7 +26,7 @@ import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.mv.rewrite.matching.TestTPCDS_1_4_Batch._
 import org.apache.carbondata.mv.testutil.Tpcds_1_4_Tables.tpcds1_4Tables
 
-class MVTPCDSTestCase extends CarbonQueryTest with BeforeAndAfterAll {
+class MVTPCDSTestCase extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll {
     drop()
@@ -52,8 +50,7 @@ class MVTPCDSTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_tpcds1")
     sql(s"create datamap datamap_tpcds1 using 'mv' as ${tpcds_1_4_testCases(0)._2}")
     val df = sql(tpcds_1_4_testCases(0)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_tpcds1"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_tpcds1"))
     sql(s"drop datamap datamap_tpcds1")
   }
 
@@ -61,8 +58,7 @@ class MVTPCDSTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_tpcds3")
     sql(s"create datamap datamap_tpcds3 using 'mv' as ${tpcds_1_4_testCases(2)._2}")
     val df = sql(tpcds_1_4_testCases(2)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_tpcds3"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_tpcds3"))
     sql(s"drop datamap datamap_tpcds3")
   }
 
@@ -70,8 +66,7 @@ class MVTPCDSTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_tpcds4")
     sql(s"create datamap datamap_tpcds4 using 'mv' as ${tpcds_1_4_testCases(3)._2}")
     val df = sql(tpcds_1_4_testCases(3)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_tpcds4"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_tpcds4"))
     sql(s"drop datamap datamap_tpcds4")
   }
 
@@ -79,8 +74,7 @@ class MVTPCDSTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_tpcds5")
     sql(s"create datamap datamap_tpcds5 using 'mv' as ${tpcds_1_4_testCases(4)._2}")
     val df = sql(tpcds_1_4_testCases(4)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_tpcds5"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_tpcds5"))
     sql(s"drop datamap datamap_tpcds5")
   }
 
@@ -88,8 +82,7 @@ class MVTPCDSTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_tpcds6")
     sql(s"create datamap datamap_tpcds6 using 'mv' as ${tpcds_1_4_testCases(5)._2}")
     val df = sql(tpcds_1_4_testCases(5)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_tpcds6"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_tpcds6"))
     sql(s"drop datamap datamap_tpcds6")
   }
 
@@ -97,8 +90,7 @@ class MVTPCDSTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_tpcds8")
     sql(s"create datamap datamap_tpcds8 using 'mv' as ${tpcds_1_4_testCases(7)._2}")
     val df = sql(tpcds_1_4_testCases(7)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_tpcds8"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_tpcds8"))
     sql(s"drop datamap datamap_tpcds8")
   }
 
@@ -106,8 +98,7 @@ class MVTPCDSTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_tpcds11")
     sql(s"create datamap datamap_tpcds11 using 'mv' as ${tpcds_1_4_testCases(10)._2}")
     val df = sql(tpcds_1_4_testCases(10)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_tpcds11"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_tpcds11"))
     sql(s"drop datamap datamap_tpcds11")
   }
 
@@ -115,8 +106,7 @@ class MVTPCDSTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_tpcds15")
     sql(s"create datamap datamap_tpcds15 using 'mv' as ${tpcds_1_4_testCases(14)._2}")
     val df = sql(tpcds_1_4_testCases(14)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_tpcds15"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_tpcds15"))
     sql(s"drop datamap datamap_tpcds15")
   }
 
@@ -124,8 +114,7 @@ class MVTPCDSTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap_tpcds16")
     sql(s"create datamap datamap_tpcds16 using 'mv' as ${tpcds_1_4_testCases(15)._2}")
     val df = sql(tpcds_1_4_testCases(15)._3)
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap_tpcds16"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap_tpcds16"))
     sql(s"drop datamap datamap_tpcds16")
   }
 
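These suites also switch their base class from CarbonQueryTest to the plain
QueryTest, so they run against a standard SparkSession with the Carbon
extension installed instead of a CarbonSession. A minimal sketch of such a
session; the master and app name are illustrative, and the extension class
name is an assumption based on this PR's wiring:

    import org.apache.spark.sql.SparkSession

    // Illustrative only: a plain SparkSession with Carbon wired in through
    // spark.sql.extensions, rather than a purpose-built CarbonSession.
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("carbon-mv-tests")
      .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
      .enableHiveSupport()
      .getOrCreate()
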
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTpchTestCase.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTpchTestCase.scala
index a8dfeca..7525a14 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTpchTestCase.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/MVTpchTestCase.scala
@@ -19,15 +19,11 @@ package org.apache.carbondata.mv.rewrite
 import java.io.File
 
 import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.execution.datasources.LogicalRelation
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, LogicalPlan}
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-
-class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
+class MVTpchTestCase extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll {
     drop()
@@ -36,12 +32,12 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     val integrationPath = s"$projectPath/integration"
     val resourcesPath = s"$integrationPath/spark-common-test/src/test/resources"
 
-    sql(s"""create table if not exists LINEITEM(  L_SHIPDATE date,  L_SHIPMODE string,  L_SHIPINSTRUCT string,  L_RETURNFLAG string,  L_RECEIPTDATE date,  L_ORDERKEY INT ,  L_PARTKEY INT ,  L_SUPPKEY   string,  L_LINENUMBER int,  L_QUANTITY double,  L_EXTENDEDPRICE double,  L_DISCOUNT double,  L_TAX double,  L_LINESTATUS string,  L_COMMITDATE date,  L_COMMENT  string) STORED BY 'org.apache.carbondata.format'""")
-    sql(s"""create table if not exists ORDERS(  O_ORDERDATE date,  O_ORDERPRIORITY string,  O_ORDERSTATUS string,  O_ORDERKEY int,  O_CUSTKEY string,  O_TOTALPRICE double,  O_CLERK string,  O_SHIPPRIORITY int,  O_COMMENT string) STORED BY 'org.apache.carbondata.format'""")
-    sql(s"""create table if not exists CUSTOMER(  C_MKTSEGMENT string,  C_NATIONKEY string,  C_CUSTKEY string,  C_NAME string,  C_ADDRESS string,  C_PHONE string,  C_ACCTBAL double,  C_COMMENT string) STORED BY 'org.apache.carbondata.format'""")
-    sql(s"""create table if not exists REGION(  R_NAME string,  R_REGIONKEY string,  R_COMMENT string) STORED BY 'org.apache.carbondata.format'""")
-    sql(s"""create table if not exists NATION (  N_NAME string,  N_NATIONKEY string,  N_REGIONKEY string,  N_COMMENT  string) STORED BY 'org.apache.carbondata.format'""")
-    sql(s"""create table if not exists SUPPLIER(S_COMMENT string,S_SUPPKEY string,S_NAME string, S_ADDRESS string, S_NATIONKEY string, S_PHONE string, S_ACCTBAL double) STORED BY 'org.apache.carbondata.format'""")
+    sql(s"""create table if not exists LINEITEM(  L_SHIPDATE date,  L_SHIPMODE string,  L_SHIPINSTRUCT string,  L_RETURNFLAG string,  L_RECEIPTDATE date,  L_ORDERKEY INT ,  L_PARTKEY INT ,  L_SUPPKEY   string,  L_LINENUMBER int,  L_QUANTITY double,  L_EXTENDEDPRICE double,  L_DISCOUNT double,  L_TAX double,  L_LINESTATUS string,  L_COMMITDATE date,  L_COMMENT  string) STORED AS carbondata""")
+    sql(s"""create table if not exists ORDERS(  O_ORDERDATE date,  O_ORDERPRIORITY string,  O_ORDERSTATUS string,  O_ORDERKEY int,  O_CUSTKEY string,  O_TOTALPRICE double,  O_CLERK string,  O_SHIPPRIORITY int,  O_COMMENT string) STORED AS carbondata""")
+    sql(s"""create table if not exists CUSTOMER(  C_MKTSEGMENT string,  C_NATIONKEY string,  C_CUSTKEY string,  C_NAME string,  C_ADDRESS string,  C_PHONE string,  C_ACCTBAL double,  C_COMMENT string) STORED AS carbondata""")
+    sql(s"""create table if not exists REGION(  R_NAME string,  R_REGIONKEY string,  R_COMMENT string) STORED AS carbondata""")
+    sql(s"""create table if not exists NATION (  N_NAME string,  N_NATIONKEY string,  N_REGIONKEY string,  N_COMMENT  string) STORED AS carbondata""")
+    sql(s"""create table if not exists SUPPLIER(S_COMMENT string,S_SUPPKEY string,S_NAME string, S_ADDRESS string, S_NATIONKEY string, S_PHONE string, S_ACCTBAL double) STORED AS carbondata""")
 
     sql(s"""load data inpath "$resourcesPath/tpch/lineitem.csv" into table lineitem options('DELIMITER'='|','FILEHEADER'='L_ORDERKEY,L_PARTKEY,L_SUPPKEY,L_LINENUMBER,L_QUANTITY,L_EXTENDEDPRICE,L_DISCOUNT,L_TAX,L_RETURNFLAG,L_LINESTATUS,L_SHIPDATE,L_COMMITDATE,L_RECEIPTDATE,L_SHIPINSTRUCT,L_SHIPMODE,L_COMMENT')""")
     sql(s"""load data inpath "$resourcesPath/tpch/orders.csv" into table ORDERS options('DELIMITER'='|','FILEHEADER'='O_ORDERKEY,O_CUSTKEY,O_ORDERSTATUS,O_TOTALPRICE,O_ORDERDATE,O_ORDERPRIORITY,O_CLERK,O_SHIPPRIORITY,O_COMMENT')""")
@@ -51,12 +47,12 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"""load data inpath "$resourcesPath/tpch/supplier.csv" into table SUPPLIER options('DELIMITER'='|','FILEHEADER'='S_SUPPKEY,S_NAME,S_ADDRESS,S_NATIONKEY,S_PHONE,S_ACCTBAL,S_COMMENT')""")
 
 
-    sql(s"""create table if not exists LINEITEM1(  L_SHIPDATE date,  L_SHIPMODE string,  L_SHIPINSTRUCT string,  L_RETURNFLAG string,  L_RECEIPTDATE date,  L_ORDERKEY INT ,  L_PARTKEY INT ,  L_SUPPKEY   string,  L_LINENUMBER int,  L_QUANTITY double,  L_EXTENDEDPRICE double,  L_DISCOUNT double,  L_TAX double,  L_LINESTATUS string,  L_COMMITDATE date,  L_COMMENT  string) STORED BY 'org.apache.carbondata.format'""")
-    sql(s"""create table if not exists ORDERS1(  O_ORDERDATE date,  O_ORDERPRIORITY string,  O_ORDERSTATUS string,  O_ORDERKEY int,  O_CUSTKEY string,  O_TOTALPRICE double,  O_CLERK string,  O_SHIPPRIORITY int,  O_COMMENT string) STORED BY 'org.apache.carbondata.format'""")
-    sql(s"""create table if not exists CUSTOMER1(  C_MKTSEGMENT string,  C_NATIONKEY string,  C_CUSTKEY string,  C_NAME string,  C_ADDRESS string,  C_PHONE string,  C_ACCTBAL double,  C_COMMENT string) STORED BY 'org.apache.carbondata.format'""")
-    sql(s"""create table if not exists REGION1(  R_NAME string,  R_REGIONKEY string,  R_COMMENT string) STORED BY 'org.apache.carbondata.format'""")
-    sql(s"""create table if not exists NATION1 (  N_NAME string,  N_NATIONKEY string,  N_REGIONKEY string,  N_COMMENT  string) STORED BY 'org.apache.carbondata.format'""")
-    sql(s"""create table if not exists SUPPLIER1(S_COMMENT string,S_SUPPKEY string,S_NAME string, S_ADDRESS string, S_NATIONKEY string, S_PHONE string, S_ACCTBAL double) STORED BY 'org.apache.carbondata.format'""")
+    sql(s"""create table if not exists LINEITEM1(  L_SHIPDATE date,  L_SHIPMODE string,  L_SHIPINSTRUCT string,  L_RETURNFLAG string,  L_RECEIPTDATE date,  L_ORDERKEY INT ,  L_PARTKEY INT ,  L_SUPPKEY   string,  L_LINENUMBER int,  L_QUANTITY double,  L_EXTENDEDPRICE double,  L_DISCOUNT double,  L_TAX double,  L_LINESTATUS string,  L_COMMITDATE date,  L_COMMENT  string) STORED AS carbondata""")
+    sql(s"""create table if not exists ORDERS1(  O_ORDERDATE date,  O_ORDERPRIORITY string,  O_ORDERSTATUS string,  O_ORDERKEY int,  O_CUSTKEY string,  O_TOTALPRICE double,  O_CLERK string,  O_SHIPPRIORITY int,  O_COMMENT string) STORED AS carbondata""")
+    sql(s"""create table if not exists CUSTOMER1(  C_MKTSEGMENT string,  C_NATIONKEY string,  C_CUSTKEY string,  C_NAME string,  C_ADDRESS string,  C_PHONE string,  C_ACCTBAL double,  C_COMMENT string) STORED AS carbondata""")
+    sql(s"""create table if not exists REGION1(  R_NAME string,  R_REGIONKEY string,  R_COMMENT string) STORED AS carbondata""")
+    sql(s"""create table if not exists NATION1 (  N_NAME string,  N_NATIONKEY string,  N_REGIONKEY string,  N_COMMENT  string) STORED AS carbondata""")
+    sql(s"""create table if not exists SUPPLIER1(S_COMMENT string,S_SUPPKEY string,S_NAME string, S_ADDRESS string, S_NATIONKEY string, S_PHONE string, S_ACCTBAL double) STORED AS carbondata""")
 
     sql(s"""load data inpath "$resourcesPath/tpch/lineitem.csv" into table lineitem1 options('DELIMITER'='|','FILEHEADER'='L_ORDERKEY,L_PARTKEY,L_SUPPKEY,L_LINENUMBER,L_QUANTITY,L_EXTENDEDPRICE,L_DISCOUNT,L_TAX,L_RETURNFLAG,L_LINESTATUS,L_SHIPDATE,L_COMMITDATE,L_RECEIPTDATE,L_SHIPINSTRUCT,L_SHIPMODE,L_COMMENT')""")
     sql(s"""load data inpath "$resourcesPath/tpch/orders.csv" into table ORDERS1 options('DELIMITER'='|','FILEHEADER'='O_ORDERKEY,O_CUSTKEY,O_ORDERSTATUS,O_TOTALPRICE,O_ORDERDATE,O_ORDERPRIORITY,O_CLERK,O_SHIPPRIORITY,O_COMMENT')""")
@@ -64,16 +60,13 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"""load data inpath "$resourcesPath/tpch/region.csv" into table REGION1 options('DELIMITER'='|','FILEHEADER'='R_REGIONKEY,R_NAME,R_COMMENT')""")
     sql(s"""load data inpath "$resourcesPath/tpch/nation.csv" into table NATION1 options('DELIMITER'='|','FILEHEADER'='N_NATIONKEY,N_NAME,N_REGIONKEY,N_COMMENT')""")
     sql(s"""load data inpath "$resourcesPath/tpch/supplier.csv" into table SUPPLIER1 options('DELIMITER'='|','FILEHEADER'='S_SUPPKEY,S_NAME,S_ADDRESS,S_NATIONKEY,S_PHONE,S_ACCTBAL,S_COMMENT')""")
-
-
   }
 
   test("test create datamap with tpch1") {
     sql(s"drop datamap if exists datamap1")
     sql("create datamap datamap1 using 'mv' as select l_returnflag, l_linestatus,l_shipdate, sum(l_quantity) as sum_qty, sum(l_extendedprice) as sum_base_price, sum(l_extendedprice*(1-l_discount)) as sum_disc_price, sum(l_extendedprice*(1-l_discount)*(1+l_tax)) as sum_charge,count(*) as count_order from lineitem group by l_returnflag, l_linestatus,l_shipdate")
     val df = sql("select l_returnflag, l_linestatus, sum(l_quantity) as sum_qty, sum(l_extendedprice) as sum_base_price, sum(l_extendedprice*(1-l_discount)) as sum_disc_price, sum(l_extendedprice*(1-l_discount)*(1+l_tax)) as sum_charge,count(*) as count_order from lineitem where l_shipdate <= date('1998-09-02') group by l_returnflag, l_linestatus order by l_returnflag, l_linestatus")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap1"))
 //    checkAnswer(df, sql("select l_returnflag, l_linestatus, sum(l_quantity) as sum_qty, sum(l_extendedprice) as sum_base_price, sum(l_extendedprice*(1-l_discount)) as sum_disc_price, sum(l_extendedprice*(1-l_discount)*(1+l_tax)) as sum_charge,count(*) as count_order from lineitem1 where l_shipdate <= date('1998-09-02') group by l_returnflag, l_linestatus order by l_returnflag, l_linestatus"))
     sql(s"drop datamap datamap1")
   }
@@ -82,8 +75,7 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap2")
     sql("create datamap datamap2 using 'mv' as select l_returnflag, l_linestatus,l_shipdate, sum(l_quantity) as sum_qty, sum(l_extendedprice) as sum_base_price, sum(l_extendedprice*(1-l_discount)) as sum_disc_price, sum(l_extendedprice*(1-l_discount)*(1+l_tax)) as sum_charge from lineitem group by l_returnflag, l_linestatus,l_shipdate order by l_returnflag, l_linestatus")
     val df = sql("select l_returnflag, l_linestatus, sum(l_quantity) as sum_qty, sum(l_extendedprice) as sum_base_price, sum(l_extendedprice*(1-l_discount)) as sum_disc_price, sum(l_extendedprice*(1-l_discount)*(1+l_tax)) as sum_charge from lineitem where l_shipdate <= date('1998-09-02') group by l_returnflag, l_linestatus order by l_returnflag, l_linestatus")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap2"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap2"))
 //    checkAnswer(df, sql("select l_returnflag, l_linestatus, sum(l_quantity) as sum_qty, sum(l_extendedprice) as sum_base_price, sum(l_extendedprice*(1-l_discount)) as sum_disc_price, sum(l_extendedprice*(1-l_discount)*(1+l_tax)) as sum_charge from lineitem1 where l_shipdate <= date('1998-09-02') group by l_returnflag, l_linestatus order by l_returnflag, l_linestatus"))
     sql(s"drop datamap datamap2")
   }
@@ -92,8 +84,7 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap3")
     sql("create datamap datamap3 using 'mv' as select l_returnflag, l_linestatus,l_shipdate, sum(l_quantity) as sum_qty, sum(l_extendedprice) as sum_base_price, sum(l_extendedprice*(1-l_discount)) as sum_disc_price, sum(l_extendedprice*(1-l_discount)*(1+l_tax)) as sum_charge from lineitem group by l_returnflag, l_linestatus,l_shipdate")
     val df = sql("select l_returnflag, l_linestatus, sum(l_quantity) as sum_qty, sum(l_extendedprice) as sum_base_price, sum(l_extendedprice*(1-l_discount)) as sum_disc_price from lineitem where l_shipdate <= date('1998-09-02') group by l_returnflag, l_linestatus order by l_returnflag, l_linestatus")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap3"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap3"))
 //    checkAnswer(df, sql("select l_returnflag, l_linestatus, sum(l_quantity) as sum_qty, sum(l_extendedprice) as sum_base_price, sum(l_extendedprice*(1-l_discount)) as sum_disc_price from lineitem1 where l_shipdate <= date('1998-09-02') group by l_returnflag, l_linestatus order by l_returnflag, l_linestatus"))
     sql(s"drop datamap datamap3")
   }
@@ -102,8 +93,7 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap4")
     sql("create datamap datamap4 using 'mv' as select l_orderkey, sum(l_extendedprice * (1 - l_discount)) as revenue, o_orderdate, o_shippriority from customer, orders, lineitem where c_mktsegment = 'BUILDING' and c_custkey = o_custkey and l_orderkey = o_orderkey and o_orderdate < date('1995-03-15') and l_shipdate > date('1995-03-15') group by l_orderkey, o_orderdate, o_shippriority")
     val df = sql("select l_orderkey, sum(l_extendedprice * (1 - l_discount)) as revenue, o_orderdate, o_shippriority from customer, orders, lineitem where c_mktsegment = 'BUILDING' and c_custkey = o_custkey and l_orderkey = o_orderkey and o_orderdate < date('1995-03-15') and l_shipdate > date('1995-03-15') group by l_orderkey, o_orderdate, o_shippriority order by revenue desc, o_orderdate limit 10")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap4"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap4"))
 //    checkAnswer(df, sql("select l_orderkey, sum(l_extendedprice * (1 - l_discount)) as revenue, o_orderdate, o_shippriority from customer1, orders1, lineitem1 where c_mktsegment = 'BUILDING' and c_custkey = o_custkey and l_orderkey = o_orderkey and o_orderdate < date('1995-03-15') and l_shipdate > date('1995-03-15') group by l_orderkey, o_orderdate, o_shippriority order by revenue desc, o_orderdate limit 10"))
     sql(s"drop datamap datamap4")
   }
@@ -112,8 +102,7 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap5")
     sql("create datamap datamap5 using 'mv' as select l_orderkey, sum(l_extendedprice * (1 - l_discount)) as revenue, o_orderdate, o_shippriority,c_mktsegment,l_shipdate, c_custkey as c1, o_custkey as c2,o_orderkey as o1  from customer, orders, lineitem where c_custkey = o_custkey and l_orderkey = o_orderkey group by l_orderkey, o_orderdate, o_shippriority,c_mktsegment,l_shipdate,c_custkey,o_custkey, o_orderkey ")
     val df = sql("select l_orderkey, sum(l_extendedprice * (1 - l_discount)) as revenue, o_orderdate, o_shippriority from customer, orders, lineitem where c_mktsegment = 'BUILDING' and c_custkey = o_custkey and l_orderkey = o_orderkey and o_orderdate < date('1995-03-15') and l_shipdate > date('1995-03-15') group by l_orderkey, o_orderdate, o_shippriority order by revenue desc, o_orderdate limit 10")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap5"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap5"))
 //    checkAnswer(df, sql("select l_orderkey, sum(l_extendedprice * (1 - l_discount)) as revenue, o_orderdate, o_shippriority from customer1, orders1, lineitem1 where c_mktsegment = 'BUILDING' and c_custkey = o_custkey and l_orderkey = o_orderkey and o_orderdate < date('1995-03-15') and l_shipdate > date('1995-03-15') group by l_orderkey, o_orderdate, o_shippriority order by revenue desc, o_orderdate limit 10"))
     sql(s"drop datamap datamap5")
   }
@@ -122,8 +111,7 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap5")
     sql("create datamap datamap5 using 'mv' as select l_orderkey, sum(l_extendedprice * (1 - l_discount)) as revenue, o_orderdate, o_shippriority,c_mktsegment,l_shipdate from customer, orders, lineitem where c_mktsegment = 'BUILDING' and c_custkey = o_custkey and l_orderkey = o_orderkey and o_orderdate < date('1995-03-15') and l_shipdate > date('1995-03-15') group by l_orderkey, o_orderdate, o_shippriority,c_mktsegment,l_shipdate")
     val df = sql("select l_orderkey, sum(l_extendedprice * (1 - l_discount)) as revenue, o_orderdate, o_shippriority from customer, orders, lineitem where c_mktsegment = 'BUILDING' and c_custkey = o_custkey and l_orderkey = o_orderkey and o_orderdate < date('1995-03-15') and l_shipdate > date('1995-03-15') group by l_orderkey, o_orderdate, o_shippriority order by revenue desc, o_orderdate limit 10")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap5"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap5"))
 //    checkAnswer(df, sql("select l_orderkey, sum(l_extendedprice * (1 - l_discount)) as revenue, o_orderdate, o_shippriority from customer1, orders1, lineitem1 where c_mktsegment = 'BUILDING' and c_custkey = o_custkey and l_orderkey = o_orderkey and o_orderdate < date('1995-03-15') and l_shipdate > date('1995-03-15') group by l_orderkey, o_orderdate, o_shippriority order by revenue desc, o_orderdate limit 10"))
     sql(s"drop datamap datamap5")
   }
@@ -132,8 +120,7 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap6")
     sql("create datamap datamap6 using 'mv' as select o_orderpriority, count(*) as order_count from orders where o_orderdate >= date('1993-07-01') and o_orderdate < date('1993-10-01') and exists ( select * from lineitem where l_orderkey = o_orderkey and l_commitdate < l_receiptdate ) group by o_orderpriority order by o_orderpriority")
     val df = sql("select o_orderpriority, count(*) as order_count from orders where o_orderdate >= date('1993-07-01') and o_orderdate < date('1993-10-01') and exists ( select * from lineitem where l_orderkey = o_orderkey and l_commitdate < l_receiptdate ) group by o_orderpriority order by o_orderpriority")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap6"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap6"))
 //    checkAnswer(df, sql("select o_orderpriority, count(*) as order_count from orders1 where o_orderdate >= date('1993-07-01') and o_orderdate < date('1993-10-01') and exists ( select * from lineitem1 where l_orderkey = o_orderkey and l_commitdate < l_receiptdate ) group by o_orderpriority order by o_orderpriority"))
     sql(s"drop datamap datamap6")
   }
@@ -142,8 +129,7 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap7")
     sql("create datamap datamap7 using 'mv' as select n_name, sum(l_extendedprice * (1 - l_discount)) as revenue from customer, orders, lineitem, supplier, nation, region where c_custkey = o_custkey and l_orderkey = o_orderkey and l_suppkey = s_suppkey and c_nationkey = s_nationkey and s_nationkey = n_nationkey and n_regionkey = r_regionkey and r_name = 'ASIA' and o_orderdate >= date('1994-01-01') and o_orderdate < date('1995-01-01') group by n_name")
     val df = sql("select n_name, sum(l_extendedprice * (1 - l_discount)) as revenue from customer, orders, lineitem, supplier, nation, region where c_custkey = o_custkey and l_orderkey = o_orderkey and l_suppkey = s_suppkey and c_nationkey = s_nationkey and s_nationkey = n_nationkey and n_regionkey = r_regionkey and r_name = 'ASIA' and o_orderdate >= date('1994-01-01') and o_orderdate < date('1995-01-01') group by n_name order by revenue desc")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap7"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap7"))
 //    checkAnswer(df, sql("select n_name, sum(l_extendedprice * (1 - l_discount)) as revenue from customer1, orders1, lineitem1, supplier1, nation1, region1 where c_custkey = o_custkey and l_orderkey = o_orderkey and l_suppkey = s_suppkey and c_nationkey = s_nationkey and s_nationkey = n_nationkey and n_regionkey = r_regionkey and r_name = 'ASIA' and o_orderdate >= date('1994-01-01') and o_orderdate < date('1995-01-01') group by n_name order by revenue desc"))
     sql(s"drop datamap datamap7")
   }
@@ -152,8 +138,7 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap8")
     sql("create datamap datamap8 using 'mv' as select n_name,o_orderdate,r_name, sum(l_extendedprice * (1 - l_discount)) as revenue, sum(c_custkey), sum(o_custkey), sum(l_orderkey),sum(o_orderkey), sum(l_suppkey), sum(s_suppkey), sum(c_nationkey), sum(s_nationkey), sum(n_nationkey), sum(n_regionkey), sum(r_regionkey)  from customer, orders, lineitem, supplier, nation, region where c_custkey = o_custkey and l_orderkey = o_orderkey and l_suppkey = s_suppkey and c_nationkey = s_nationkey an [...]
     val df = sql("select n_name, sum(l_extendedprice * (1 - l_discount)) as revenue from customer, orders, lineitem, supplier, nation, region where c_custkey = o_custkey and l_orderkey = o_orderkey and l_suppkey = s_suppkey and c_nationkey = s_nationkey and s_nationkey = n_nationkey and n_regionkey = r_regionkey and r_name = 'ASIA' and o_orderdate >= date('1994-01-01') and o_orderdate < date('1995-01-01') group by n_name order by revenue desc")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap8"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap8"))
 //    checkAnswer(df, sql("select n_name, sum(l_extendedprice * (1 - l_discount)) as revenue from customer1, orders1, lineitem1, supplier1, nation1, region1 where c_custkey = o_custkey and l_orderkey = o_orderkey and l_suppkey = s_suppkey and c_nationkey = s_nationkey and s_nationkey = n_nationkey and n_regionkey = r_regionkey and r_name = 'ASIA' and o_orderdate >= date('1994-01-01') and o_orderdate < date('1995-01-01') group by n_name order by revenue desc"))
     sql(s"drop datamap datamap8")
   }
@@ -162,9 +147,8 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap9")
     sql("create datamap datamap9 using 'mv' as select sum(l_extendedprice * l_discount) as revenue, count(l_shipdate), sum(l_discount),sum(l_quantity)  from lineitem where l_shipdate >= date('1994-01-01') and l_shipdate < date('1995-01-01') and l_discount between 0.05 and 0.07 and l_quantity < 24")
     val df = sql("select sum(l_extendedprice * l_discount) as revenue from lineitem where l_shipdate >= date('1994-01-01') and l_shipdate < date('1995-01-01') and l_discount between 0.05 and 0.07 and l_quantity < 24")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap9"))
-    assert(verifyAgg(analyzed))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap9"))
+    assert(verifyAgg(df.queryExecution.optimizedPlan))
 //    checkAnswer(df, sql("select sum(l_extendedprice * l_discount) as revenue from lineitem1 where l_shipdate >= date('1994-01-01') and l_shipdate < date('1995-01-01') and l_discount between 0.05 and 0.07 and l_quantity < 24"))
     sql(s"drop datamap datamap9")
   }
@@ -173,9 +157,8 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap10")
     sql("create datamap datamap10 using 'mv' as select sum(l_extendedprice * l_discount) as revenue,l_shipdate,l_discount,l_quantity from lineitem group by l_shipdate,l_discount,l_quantity")
     val df = sql("select sum(l_extendedprice * l_discount) as revenue from lineitem where l_shipdate >= date('1994-01-01') and l_shipdate < date('1995-01-01') and l_discount between 0.05 and 0.07 and l_quantity < 24")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap10"))
-    assert(verifyAgg(analyzed))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap10"))
+    assert(verifyAgg(df.queryExecution.optimizedPlan))
 //    checkAnswer(df, sql("select sum(l_extendedprice * l_discount) as revenue from lineitem1 where l_shipdate >= date('1994-01-01') and l_shipdate < date('1995-01-01') and l_discount between 0.05 and 0.07 and l_quantity < 24"))
     sql(s"drop datamap datamap10")
   }
@@ -184,8 +167,7 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap11")
     sql("create datamap datamap11 using 'mv' as select l_shipdate,n_name , l_extendedprice , l_discount, s_suppkey,l_suppkey, o_orderkey,l_orderkey, c_custkey, o_custkey, s_nationkey,  n1.n_nationkey, c_nationkey from supplier,lineitem,orders,customer,nation n1 where s_suppkey = l_suppkey and o_orderkey = l_orderkey and c_custkey = o_custkey and s_nationkey = n1.n_nationkey and c_nationkey = n1.n_nationkey")
     val df = sql("select year(l_shipdate) as l_year, l_extendedprice * (1 - l_discount) as volume from supplier,lineitem,orders,customer,nation n1 where s_suppkey = l_suppkey and o_orderkey = l_orderkey and c_custkey = o_custkey and s_nationkey = n1.n_nationkey and c_nationkey = n1.n_nationkey and ( (n1.n_name = 'FRANCE') or (n1.n_name = 'GERMANY') ) and l_shipdate between date('1995-01-01') and date('1996-12-31')")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap11"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap11"))
 //    checkAnswer(df, sql("select year(l_shipdate) as l_year, l_extendedprice * (1 - l_discount) as volume from supplier1,lineitem1,orders1,customer1,nation1 n1 where s_suppkey = l_suppkey and o_orderkey = l_orderkey and c_custkey = o_custkey and s_nationkey = n1.n_nationkey and c_nationkey = n1.n_nationkey and ( (n1.n_name = 'FRANCE') or (n1.n_name = 'GERMANY') ) and l_shipdate between date('1995-01-01') and date('1996-12-31')"))
     sql(s"drop datamap datamap11")
   }
@@ -194,8 +176,7 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap12")
     sql("create datamap datamap12 using 'mv' as select n1.n_name, l_shipdate, l_extendedprice ,l_discount,s_suppkey, l_suppkey,o_orderkey,l_orderkey, c_custkey,o_custkey,s_nationkey,  n1.n_nationkey,c_nationkey from supplier,lineitem,orders,customer,nation n1 where s_suppkey = l_suppkey and o_orderkey = l_orderkey and c_custkey = o_custkey and s_nationkey = n1.n_nationkey and c_nationkey = n1.n_nationkey")
     val df = sql("select supp_nation, l_year, sum(volume) as revenue from ( select n1.n_name as supp_nation, year(l_shipdate) as l_year, l_extendedprice * (1 - l_discount) as volume from supplier,lineitem,orders,customer,nation n1 where s_suppkey = l_suppkey and o_orderkey = l_orderkey and c_custkey = o_custkey and s_nationkey = n1.n_nationkey and c_nationkey = n1.n_nationkey and ( (n1.n_name = 'FRANCE' ) or (n1.n_name = 'GERMANY') ) and l_shipdate between date('1995-01-01') and date('19 [...]
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap12"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap12"))
 //    checkAnswer(df, sql("select supp_nation, l_year, sum(volume) as revenue from ( select n1.n_name as supp_nation, year(l_shipdate) as l_year, l_extendedprice * (1 - l_discount) as volume from supplier1,lineitem1,orders1,customer1,nation1 n1 where s_suppkey = l_suppkey and o_orderkey = l_orderkey and c_custkey = o_custkey and s_nationkey = n1.n_nationkey and c_nationkey = n1.n_nationkey and ( (n1.n_name = 'FRANCE' ) or (n1.n_name = 'GERMANY') ) and l_shipdate between date('1995-01-01' [...]
     sql(s"drop datamap datamap12")
   }
@@ -204,16 +185,15 @@ class MVTpchTestCase extends CarbonQueryTest with BeforeAndAfterAll {
     sql(s"drop datamap if exists datamap13")
     sql("create datamap datamap13 using 'mv' as select n1.n_name as supp_nation, n2.n_name as cust_nation, l_shipdate, l_extendedprice * (1 - l_discount) as volume from supplier,lineitem,orders,customer,nation n1,nation n2 where s_suppkey = l_suppkey and o_orderkey = l_orderkey and c_custkey = o_custkey and s_nationkey = n1.n_nationkey and c_nationkey = n2.n_nationkey")
     val df = sql("select supp_nation, cust_nation, l_year, sum(volume) as revenue from ( select n1.n_name as supp_nation, n2.n_name as cust_nation, year(l_shipdate) as l_year, l_extendedprice * (1 - l_discount) as volume from supplier,lineitem,orders,customer,nation n1,nation n2 where s_suppkey = l_suppkey and o_orderkey = l_orderkey and c_custkey = o_custkey and s_nationkey = n1.n_nationkey and c_nationkey = n2.n_nationkey and ( (n1.n_name = 'FRANCE' and n2.n_name = 'GERMANY') or (n1.n_ [...]
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap13"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap13"))
 //    checkAnswer(df, sql("select supp_nation, cust_nation, l_year, sum(volume) as revenue from ( select n1.n_name as supp_nation, n2.n_name as cust_nation, year(l_shipdate) as l_year, l_extendedprice * (1 - l_discount) as volume from supplier,lineitem1,orders1,customer1,nation1 n1,nation1 n2 where s_suppkey = l_suppkey and o_orderkey = l_orderkey and c_custkey = o_custkey and s_nationkey = n1.n_nationkey and c_nationkey = n2.n_nationkey and ( (n1.n_name = 'FRANCE' and n2.n_name = 'GERMA [...]
     sql(s"drop datamap datamap13")
   }
 
   def verifyAgg(logicalPlan: LogicalPlan): Boolean = {
     var aggExpExists = false
-    logicalPlan transformExpressions {
-      case a:AggregateExpression =>
+    logicalPlan transform {
+      case a:Aggregate =>
         aggExpExists = true
         a
     }
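The rewritten verifyAgg matches the Aggregate operator with a plan-level
transform instead of scanning for AggregateExpression with
transformExpressions, since on the optimized plan the aggregation is visible
as a plan node. Because the transform only flips a flag and returns the node
unchanged, an equivalent side-effect-free form would be (a sketch, not part
of the patch):

    import org.apache.spark.sql.catalyst.plans.logical.{Aggregate, LogicalPlan}

    // Equivalent existence check: true if the plan contains any Aggregate node.
    def verifyAgg(plan: LogicalPlan): Boolean =
      plan.collectFirst { case a: Aggregate => a }.isDefined
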
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/SelectAllColumnsSuite.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/SelectAllColumnsSuite.scala
index 5344939..d7c6187 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/SelectAllColumnsSuite.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/SelectAllColumnsSuite.scala
@@ -17,16 +17,14 @@
 package org.apache.carbondata.mv.rewrite
 
 import org.apache.spark.sql.Row
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.execution.datasources.LogicalRelation
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 
-class SelectAllColumnsSuite extends CarbonQueryTest {
+class SelectAllColumnsSuite extends QueryTest {
 
   test ("table select all columns mv") {
     sql("drop datamap if exists all_table_mv")
     sql("drop table if exists all_table")
-    sql("create table all_table(name string, age int, height int)  stored by 'carbondata'")
+    sql("create table all_table(name string, age int, height int) STORED AS carbondata")
     sql("insert into all_table select 'tom',20,175")
     sql("insert into all_table select 'tom',32,180")
     sql("create datamap all_table_mv on table all_table using 'mv' as select avg(age),avg(height),name from all_table group by name")
@@ -35,8 +33,7 @@ class SelectAllColumnsSuite extends CarbonQueryTest {
       sql("select avg(age),avg(height),name from all_table group by name"),
       Seq(Row(26.0, 177.5, "tom")))
     val frame = sql("select avg(age),avg(height),name from all_table group by name")
-    val analyzed = frame.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "all_table_mv"))
+    assert(TestUtil.verifyMVDataMap(frame.queryExecution.optimizedPlan, "all_table_mv"))
     sql("drop table if exists all_table")
   }
 
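Throughout these suites the DDL likewise changes uniformly from
STORED BY 'carbondata' (or the full STORED BY 'org.apache.carbondata.format'
form) to STORED AS carbondata, the form the uniform rewrite suggests the
extension path accepts. A sketch of the new shape, given a SparkSession
spark as above; the table name is illustrative:

    // Illustrative DDL only; the column list mirrors SelectAllColumnsSuite.
    spark.sql(
      """create table if not exists demo_table(
        |  name string, age int, height int)
        |STORED AS carbondata""".stripMargin)
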
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
index 3c0ced0..66c8a0f 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestAllOperationsOnMV.scala
@@ -22,7 +22,7 @@ import java.util
 
 import org.apache.spark.sql.{AnalysisException, CarbonEnv, Row}
 import org.apache.spark.sql.catalyst.TableIdentifier
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterEach
 
 import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
@@ -34,14 +34,14 @@ import org.apache.carbondata.spark.exception.ProcessMetaDataException
 /**
  * Test Class for MV Datamap to verify all scenarios
  */
-class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
+class TestAllOperationsOnMV extends QueryTest with BeforeAndAfterEach {
 
   override def beforeEach(): Unit = {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop table IF EXISTS testtable")
-    sql("create table testtable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table testtable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table testtable select 'abc',21,2000")
     sql("drop datamap if exists dm1")
     sql("create datamap dm1 using 'mv' WITH DEFERRED REBUILD as select name,sum(price) " +
@@ -146,12 +146,12 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test table properties") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata' tblproperties('LOCAL_DICTIONARY_ENABLE'='false')")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata tblproperties('LOCAL_DICTIONARY_ENABLE'='false')")
     sql("drop datamap if exists dm1")
     sql("create datamap dm1  using 'mv' WITH DEFERRED REBUILD as select name,price from maintable")
     checkExistence(sql("describe formatted dm1_table"), true, "Local Dictionary Enabled false")
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata' tblproperties('TABLE_BLOCKSIZE'='256 MB')")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata tblproperties('TABLE_BLOCKSIZE'='256 MB')")
     sql("drop datamap if exists dm1")
     sql("create datamap dm1  using 'mv' WITH DEFERRED REBUILD as select name,price from maintable")
     checkExistence(sql("describe formatted dm1_table"), true, "Table Block Size  256 MB")
@@ -159,7 +159,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test delete segment by id on main table") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("insert into table maintable select 'abc',21,2000")
     sql("Delete from table maintable where segment.id in (0)")
@@ -177,7 +177,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test delete segment by date on main table") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("insert into table maintable select 'abc',21,2000")
     sql("Delete from table maintable where segment.id in (0)")
@@ -195,7 +195,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test direct load to mv datamap table") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm1")
     sql("create datamap dm1 using 'mv' WITH DEFERRED REBUILD as select name " +
@@ -209,7 +209,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test drop datamap with tablename") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm1 on table maintable")
     sql("create datamap dm1 using 'mv' WITH DEFERRED REBUILD as select price " +
@@ -224,7 +224,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test mv with attribute having qualifier") {
     sql("drop table if exists maintable")
-    sql("create table maintable (product string) partitioned by (amount int) stored by 'carbondata' ")
+    sql("create table maintable (product string) partitioned by (amount int) STORED AS carbondata ")
     sql("insert into maintable values('Mobile',2000)")
     sql("drop datamap if exists p")
     sql("Create datamap p using 'mv' as Select p.product, p.amount from maintable p where p.product = 'Mobile'")
@@ -247,7 +247,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
   //Test show datamap
   test("test datamap status with single table") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm1 ")
     sql("create datamap dm1 using 'mv' WITH DEFERRED REBUILD as select price from maintable")
@@ -267,10 +267,10 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test datamap status with multiple tables") {
     sql("drop table if exists products")
-    sql("create table products (product string, amount int) stored by 'carbondata' ")
+    sql("create table products (product string, amount int) STORED AS carbondata ")
     sql(s"load data INPATH '$resourcesPath/products.csv' into table products")
     sql("drop table if exists sales")
-    sql("create table sales (product string, quantity int) stored by 'carbondata'")
+    sql("create table sales (product string, quantity int) STORED AS carbondata")
     sql(s"load data INPATH '$resourcesPath/sales_data.csv' into table sales")
     sql("drop datamap if exists innerjoin")
     sql(
@@ -299,7 +299,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("directly drop datamap table") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm1 ")
     sql("create datamap dm1 using 'mv' WITH DEFERRED REBUILD as select price from maintable")
@@ -311,7 +311,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("create datamap on child table") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm1 ")
     sql("create datamap dm1 using 'mv' as select name, price from maintable")
@@ -322,7 +322,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("create datamap if already exists") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm1 ")
     sql("create datamap dm1 using 'mv' as select name from maintable")
@@ -334,7 +334,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test create datamap with select query having 'like' expression") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("select name from maintable where name like '%b%'").show(false)
     sql("drop datamap if exists dm_like ")
@@ -345,7 +345,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test datamap with streaming dmproperty") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm ")
     intercept[MalformedCarbonCommandException] {
@@ -356,7 +356,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test set streaming after creating datamap table") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm ")
     sql("create datamap dm using 'mv' as select name from maintable")
@@ -368,7 +368,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test block complex data types") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code array<int>, price struct<b:int>,type map<string, string>) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code array<int>, price struct<b:int>,type map<string, string>) STORED AS carbondata")
     sql("insert into table maintable values('abc', array(21), named_struct('b', 2000), map('ab','type1'))")
     sql("drop datamap if exists dm ")
     intercept[UnsupportedOperationException] {
@@ -388,7 +388,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("validate dmproperties") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm ")
     intercept[MalformedCarbonCommandException] {
@@ -398,7 +398,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test todate UDF function with mv") {
     sql("drop table IF EXISTS maintable")
-    sql("CREATE TABLE maintable (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'")
+    sql("CREATE TABLE maintable (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata")
   sql("insert into maintable values(1, 'abc', 'abc001', '1975-06-11 01:00:03.0','1975-06-11 02:00:03.0', 120, 1234,4.34,24.56,12345, 2464, 45)")
     sql("drop datamap if exists dm ")
     sql("create datamap dm using 'mv' as select max(to_date(dob)) , min(to_date(dob)) from maintable where to_date(dob)='1975-06-11' or to_date(dob)='1975-06-23'")
@@ -407,7 +407,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test preagg and mv") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm_mv ")
     sql("create datamap dm_mv using 'mv' as select name, sum(price) from maintable group by name")
@@ -419,7 +419,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test inverted index  & no-inverted index inherited from parent table") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata' tblproperties('sort_columns'='name', 'inverted_index'='name','sort_scope'='local_sort')")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata tblproperties('sort_columns'='name', 'inverted_index'='name','sort_scope'='local_sort')")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm ")
     sql("create datamap dm using 'mv' as select name, sum(price) from maintable group by name")
@@ -430,7 +430,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test column compressor on preagg and mv") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata' tblproperties('carbon.column.compressor'='zstd')")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata tblproperties('carbon.column.compressor'='zstd')")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm_mv ")
     sql("create datamap dm_mv on table maintable using 'mv' as select name, sum(price) from maintable group by name")
@@ -441,7 +441,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test sort_scope if sort_columns are provided") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata' tblproperties('sort_columns'='name')")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata tblproperties('sort_columns'='name')")
     sql("insert into table maintable select 'abc',21,2000")
     sql("create datamap dm_mv on table maintable using 'mv' as select name, sum(price) from maintable group by name")
     checkExistence(sql("describe formatted dm_mv_table"), true, "Sort Scope LOCAL_SORT")
@@ -450,7 +450,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test inverted_index if sort_scope is provided") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata' tblproperties('sort_scope'='no_sort','sort_columns'='name', 'inverted_index'='name')")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata tblproperties('sort_scope'='no_sort','sort_columns'='name', 'inverted_index'='name')")
     sql("insert into table maintable select 'abc',21,2000")
     checkExistence(sql("describe formatted maintable"), true, "Inverted Index Columns name")
     sql("create datamap dm_mv on table maintable using 'mv' as select name, sum(price) from maintable group by name")
@@ -461,13 +461,13 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
   test("test sort column") {
     sql("drop table IF EXISTS maintable")
     intercept[MalformedCarbonCommandException] {
-      sql("create table maintable(name string, c_code int, price int) stored by 'carbondata' tblproperties('sort_scope'='local_sort','sort_columns'='')")
+      sql("create table maintable(name string, c_code int, price int) STORED AS carbondata tblproperties('sort_scope'='local_sort','sort_columns'='')")
     }.getMessage.contains("Cannot set SORT_COLUMNS as empty when SORT_SCOPE is LOCAL_SORT")
   }
 
   test("test delete on datamap table") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata' tblproperties('sort_scope'='no_sort','sort_columns'='name', 'inverted_index'='name')")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata tblproperties('sort_scope'='no_sort','sort_columns'='name', 'inverted_index'='name')")
     sql("insert into table maintable select 'abc',21,2000")
     sql("create datamap dm_mv on table maintable using 'mv' as select name, sum(price) from maintable group by name")
     intercept[UnsupportedOperationException] {
@@ -478,7 +478,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test drop/show meta cache directly on mv datamap table") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm ")
     sql("create datamap dm using 'mv' as select name, sum(price) from maintable group by name")
@@ -495,7 +495,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
     sql("drop table if exists maintable")
     sql("create table maintable(id int, name string, id1 string, id2 string, dob timestamp, doj " +
         "timestamp, v1 bigint, v2 bigint, v3 decimal(30,10), v4 decimal(20,10), v5 double, v6 " +
-        "double ) stored by 'carbondata'")
+        "double ) STORED AS carbondata")
     sql("insert into maintable values(1, 'abc', 'id001', 'id002', '2017-01-01 00:00:00','2017-01-01 " +
         "00:00:00', 234, 2242,12.4,23.4,2323,455 )")
     checkAnswer(sql("select count(*) from maintable where  id1 < id2"), Seq(Row(1)))
@@ -504,18 +504,18 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test mv with filter instance of expression") {
     sql("drop table IF EXISTS maintable")
-    sql("CREATE TABLE maintable (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB date, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'")
+    sql("CREATE TABLE maintable (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB date, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata")
     sql("insert into maintable values(1, 'abc', 'abc001', '1975-06-11','1975-06-11 02:00:03.0', 120, 1234,4.34,24.56,12345, 2464, 45)")
     sql("drop datamap if exists dm ")
     sql("create datamap dm using 'mv' as select dob from maintable where (dob='1975-06-11' or cust_id=2)")
     val df = sql("select dob from maintable where (dob='1975-06-11' or cust_id=2)")
-    TestUtil.verifyMVDataMap(df.queryExecution.analyzed, "dm")
+    TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "dm")
     sql("drop table IF EXISTS maintable")
   }
 
   test("test histogram_numeric, collect_set & collect_list functions") {
     sql("drop table IF EXISTS maintable")
-    sql("CREATE TABLE maintable (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format'")
+    sql("CREATE TABLE maintable (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata")
     sql("insert into maintable values(1, 'abc', 'abc001', '1975-06-11 01:00:03.0','1975-06-11 02:00:03.0', 120, 1234,4.34,24.56,12345, 2464, 45)")
     sql("drop datamap if exists dm ")
     intercept[UnsupportedOperationException] {
@@ -532,26 +532,26 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("test query aggregation on mv datamap ") {
     sql("drop table if exists maintable")
-    sql("create table maintable(name string, age int, add string) stored by 'carbondata'")
+    sql("create table maintable(name string, age int, add string) STORED AS carbondata")
     sql("insert into maintable values('abc',1,'a'),('def',2,'b'),('ghi',3,'c')")
     val res = sql("select sum(age) from maintable")
     sql("drop datamap if exists mv3")
     sql("create datamap mv3 on table maintable using 'mv' as select age,sum(age) from maintable group by age")
     val df = sql("select sum(age) from maintable")
-    TestUtil.verifyMVDataMap(df.queryExecution.analyzed, "mv3")
+    TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "mv3")
     checkAnswer(res, df)
     sql("drop table if exists maintable")
   }
 
   test("test order by columns not given in projection") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     val res = sql("select name from maintable order by c_code")
     sql("drop datamap if exists dm1")
     sql("create datamap dm1 using 'mv' as select name from maintable order by c_code")
     val df = sql("select name from maintable order by c_code")
-    TestUtil.verifyMVDataMap(df.queryExecution.analyzed, "dm1")
+    TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "dm1")
     checkAnswer(res, df)
     intercept[Exception] {
       sql("alter table maintable drop columns(c_code)")
@@ -561,7 +561,7 @@ class TestAllOperationsOnMV extends CarbonQueryTest with BeforeAndAfterEach {
 
   test("drop meta cache on mv datamap table") {
     sql("drop table IF EXISTS maintable")
-    sql("create table maintable(name string, c_code int, price int) stored by 'carbondata'")
+    sql("create table maintable(name string, c_code int, price int) STORED AS carbondata")
     sql("insert into table maintable select 'abc',21,2000")
     sql("drop datamap if exists dm ")
     sql("create datamap dm using 'mv' as select name, sum(price) from maintable group by name")
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestPartitionWithMV.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestPartitionWithMV.scala
index 08cfdaf..92323c4 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestPartitionWithMV.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestPartitionWithMV.scala
@@ -19,9 +19,7 @@ package org.apache.carbondata.mv.rewrite
 
 import scala.collection.JavaConverters._
 
-import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
-import org.apache.spark.sql.execution.datasources.LogicalRelation
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.apache.spark.sql.{CarbonEnv, Row}
 import org.scalatest.BeforeAndAfterAll
 
@@ -30,7 +28,7 @@ import org.apache.carbondata.core.datastore.impl.FileFactory
 /**
  * Test class for MV to verify partition scenarios
  */
-class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
+class TestPartitionWithMV extends QueryTest with BeforeAndAfterAll {
 
   val testData = s"$resourcesPath/sample.csv"
 
@@ -41,12 +39,12 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
     sql(
       """
         | CREATE TABLE par(id INT, name STRING, age INT) PARTITIONED BY(city STRING)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql(
       """
         | CREATE TABLE maintable(id int, name string, city string) partitioned by (age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql(s"LOAD DATA LOCAL INPATH '$testData' into table maintable")
   }
@@ -87,7 +85,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("create datamap p1 on table partitionone using 'mv' as select empname, year, sum(year),month,day from partitionone group by empname, year, month,day")
@@ -97,8 +95,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
     checkAnswer(sql("select * from p1_table"), Seq(Row("v",2014,2014,1,1)))
     checkAnswer(sql("select empname, sum(year) from partitionone group by empname, year, month,day"), Seq(Row("v", 2014)))
     val df1 = sql(s"select empname, sum(year) from partitionone group by empname, year, month,day")
-    val analyzed1 = df1.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "p1"))
+    assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "p1"))
     assert(CarbonEnv.getCarbonTable(Some("partition_mv"), "p1_table")(sqlContext.sparkSession).isHivePartitionTable)
   }
 
@@ -108,7 +105,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("create datamap p1 on table partitionone using 'mv' as select empname, year, sum(year),month,day from partitionone group by empname, year, month,day")
@@ -116,8 +113,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
     sql("insert overwrite table partitionone values('v',2,2015,1,1)")
     checkAnswer(sql("select * from partitionone"), Seq(Row("k",2,2014,1,1), Row("v",2,2015,1,1)))
     val df1 = sql(s"select empname, sum(year) from partitionone group by empname, year, month,day")
-    val analyzed1 = df1.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "p1"))
+    assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "p1"))
     checkAnswer(sql("select * from p1_table"), Seq(Row("k",2014,2014,1,1), Row("v",2015,2015,1,1)))
   }
 
@@ -127,7 +123,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("create datamap p1 on table partitionone using 'mv' as select empname,  year, sum(year),month,day from partitionone group by empname, month, year,day")
@@ -149,7 +145,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("create datamap p1 on table partitionone using 'mv' as select empname,  year, sum(year),month,day from partitionone group by empname, year, month,day")
@@ -170,7 +166,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("create datamap p1 on table partitionone using 'mv' as select empname,  year, sum(year),month,day from partitionone group by empname, year, month,day")
@@ -191,7 +187,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("create datamap p1 on table partitionone using 'mv' as select empname,  year, sum(year),month,day from partitionone group by empname, year, month, day")
@@ -211,7 +207,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("create datamap p1 on table partitionone using 'mv' as select empname,  year, sum(year),month,day from partitionone group by empname, year, month, day")
@@ -231,7 +227,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists droppartition (empname String, age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("create datamap p1 using 'mv' as select empname,  year, sum(year),month,day from droppartition group by empname, year, month, day")
@@ -259,7 +255,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("create datamap p1 on table partitionone using 'mv' as select empname,  year, sum(year),month,day from partitionone group by empname, year, month, day")
@@ -279,7 +275,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String,age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("create datamap p1 on table partitionone using 'mv' as select empname,  year, sum(year),month,day from partitionone group by empname, year, month, day")
@@ -299,7 +295,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("create datamap p1 on table partitionone using 'mv' as select empname,  year, sum(year),month,day from partitionone group by empname, year, month, day")
@@ -319,7 +315,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("create datamap p1 on table partitionone using 'mv' as select empname,  year, sum(year),month,day from partitionone group by empname, year, month, day")
@@ -340,7 +336,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("create datamap p1 on table partitionone using 'mv' as select empname,  year, sum(year),month,day from partitionone group by empname, year, month, day")
@@ -360,7 +356,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String,age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("drop datamap if exists p2")
@@ -385,7 +381,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("drop datamap if exists p2")
@@ -413,7 +409,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql(
@@ -433,7 +429,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, age int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql("drop datamap if exists p2")
@@ -458,7 +454,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql(
@@ -476,7 +472,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql(
@@ -494,7 +490,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql(
@@ -517,7 +513,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, id int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql("drop datamap if exists p1")
     sql(
@@ -530,7 +526,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
   test("test dropping partition which has already been deleted") {
     sql("drop table if exists partitiontable")
     sql("create table partitiontable(id int,name string) partitioned by (email string) " +
-        "stored by 'carbondata' tblproperties('sort_scope'='global_sort')")
+        "STORED AS carbondata tblproperties('sort_scope'='global_sort')")
     sql("insert into table partitiontable select 1,'huawei','abc'")
     sql("create datamap ag1 on table partitiontable using 'mv' as select count(email),id" +
         " from partitiontable group by id")
@@ -559,7 +555,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
   test("test mv table creation with count(*) on Partition table") {
     sql("drop table if exists partitiontable")
     sql("create table partitiontable(id int,name string) partitioned by (email string) " +
-        "stored by 'carbondata' tblproperties('sort_scope'='global_sort')")
+        "STORED AS carbondata tblproperties('sort_scope'='global_sort')")
     sql("insert into table partitiontable select 1,'huawei','abc'")
     sql("drop datamap if exists ag1")
     sql("create datamap ag1 on table partitiontable using 'mv' as select count(*),id" +
@@ -572,7 +568,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
   test("test blocking partitioning of mv table") {
     sql("drop table if exists updatetime_8")
     sql("create table updatetime_8" +
-        "(countryid smallint,hs_len smallint,minstartdate string,startdate string,newdate string,minnewdate string) partitioned by (imex smallint) stored by 'carbondata' tblproperties('sort_scope'='global_sort','sort_columns'='countryid,imex,hs_len,minstartdate,startdate,newdate,minnewdate','table_blocksize'='256')")
+        "(countryid smallint,hs_len smallint,minstartdate string,startdate string,newdate string,minnewdate string) partitioned by (imex smallint) STORED AS carbondata tblproperties('sort_scope'='global_sort','sort_columns'='countryid,imex,hs_len,minstartdate,startdate,newdate,minnewdate','table_blocksize'='256')")
     sql("drop datamap if exists ag")
     sql("create datamap ag on table updatetime_8 using 'mv' dmproperties('partitioning'='false') as select imex,sum(hs_len) from updatetime_8 group by imex")
     val carbonTable = CarbonEnv.getCarbonTable(Some("partition_mv"), "ag_table")(sqlContext.sparkSession)
@@ -586,7 +582,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       "create table partitionallcompaction(empno int,empname String,designation String," +
       "workgroupcategory int,workgroupcategoryname String,deptno int,projectjoindate timestamp," +
       "projectenddate date,attendance int,utilization int,salary int) partitioned by (deptname " +
-      "String,doj timestamp,projectcode int) stored  by 'carbondata' tblproperties" +
+      "String,doj timestamp,projectcode int) STORED AS carbondata tblproperties" +
       "('sort_scope'='global_sort')")
     sql(
       "create datamap sensor_1 on table partitionallcompaction using 'mv' as select " +
@@ -627,7 +623,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
   test("Test data updation in Aggregate query after compaction on Partitioned table with mv table") {
     sql("drop table if exists updatetime_8")
     sql("create table updatetime_8" +
-        "(countryid smallint,hs_len smallint,minstartdate string,startdate string,newdate string,minnewdate string) partitioned by (imex smallint) stored by 'carbondata' tblproperties('sort_scope'='global_sort','sort_columns'='countryid,imex,hs_len,minstartdate,startdate,newdate,minnewdate','table_blocksize'='256')")
+        "(countryid smallint,hs_len smallint,minstartdate string,startdate string,newdate string,minnewdate string) partitioned by (imex smallint) STORED AS carbondata tblproperties('sort_scope'='global_sort','sort_columns'='countryid,imex,hs_len,minstartdate,startdate,newdate,minnewdate','table_blocksize'='256')")
     sql("drop datamap if exists ag")
     sql("create datamap ag on table updatetime_8 using 'mv' as select sum(hs_len), imex from updatetime_8 group by imex")
     sql("insert into updatetime_8 select 21,20,'fbv','gbv','wvsw','vwr',23")
@@ -649,7 +645,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
       """
         | CREATE TABLE if not exists partitionone (empname String, id int)
         | PARTITIONED BY (year int, month int,day int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
     sql(
       "create datamap p7 on table partitionone using 'mv' as select empname, year, day, sum(year), sum(day) from partitionone group by empname, year, day")
@@ -677,7 +673,7 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
 
   test("test partition at last column") {
     sql("drop table if exists partitionone")
-    sql("create table partitionone(a int,b int) partitioned by (c int) stored by 'carbondata'")
+    sql("create table partitionone(a int,b int) partitioned by (c int) STORED AS carbondata")
     sql("insert into partitionone values(1,2,3)")
     sql("drop datamap if exists dm1")
     sql("create datamap dm1 on table partitionone using 'mv' as select c,sum(b) from partitionone group by c")
@@ -687,19 +683,20 @@ class TestPartitionWithMV extends CarbonQueryTest with BeforeAndAfterAll {
 
   test("test partition on timeseries column") {
     sql("drop table if exists partitionone")
-    sql("create table partitionone(a int,b int) partitioned by (c timestamp,d timestamp) stored by 'carbondata'")
+    sql("create table partitionone(a int,b int) partitioned by (c timestamp,d timestamp) STORED AS carbondata")
     sql("insert into partitionone values(1,2,'2017-01-01 01:00:00','2018-01-01 01:00:00')")
     sql("drop datamap if exists dm1")
+    sql("describe formatted partitionone").show(100, false)
     sql("create datamap dm1 on table partitionone using 'mv' as select timeseries(c,'day'),sum(b) from partitionone group by timeseries(c,'day')")
     assert(!CarbonEnv.getCarbonTable(Some("partition_mv"),"dm1_table")(sqlContext.sparkSession).isHivePartitionTable)
-    assert(sql("select timeseries(c,'day'),sum(b) from partitionone group by timeseries(c,'day')").count() == 1)
+    assert(sql("select timeseries(c,'day'),sum(b) from partitionone group by timeseries(c,'day')").collect().length  == 1)
     sql("drop table if exists partitionone")
-    sql("create table partitionone(a int,b timestamp) partitioned by (c timestamp) stored by 'carbondata'")
+    sql("create table partitionone(a int,b timestamp) partitioned by (c timestamp) STORED AS carbondata")
     sql("insert into partitionone values(1,'2017-01-01 01:00:00','2018-01-01 01:00:00')")
     sql("drop datamap if exists dm1")
     sql("create datamap dm1 on table partitionone using 'mv' as select timeseries(b,'day'),c from partitionone group by timeseries(b,'day'),c")
     assert(CarbonEnv.getCarbonTable(Some("partition_mv"),"dm1_table")(sqlContext.sparkSession).isHivePartitionTable)
-    assert(sql("select timeseries(b,'day'),c from partitionone group by timeseries(b,'day'),c").count() == 1)
+    assert(sql("select timeseries(b,'day'),c from partitionone group by timeseries(b,'day'),c").collect().length == 1)
     sql("drop table if exists partitionone")
   }
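TestPartitionWithMV exercises MVs over Hive-partitioned parents. Two behaviours recur in the hunks above: the datamap table inherits the parent's partitioning when the partition columns survive into the MV projection, and `dmproperties('partitioning'='false')` opts out. A sketch of both, assuming `spark` is built with CarbonExtensions as in the earlier sketch:

```scala
// Assumes `spark` is a SparkSession configured with CarbonExtensions.
spark.sql("drop table if exists partitionone")
spark.sql(
  """CREATE TABLE partitionone (empname String, age int)
    |PARTITIONED BY (year int, month int, day int)
    |STORED AS carbondata""".stripMargin)

// Partition columns kept in the projection: p1's MV table is itself partitioned
spark.sql("create datamap p1 on table partitionone using 'mv' as " +
  "select empname, year, sum(year), month, day from partitionone " +
  "group by empname, year, month, day")

// Explicit opt-out, as in the updatetime_8 test above: MV table stays unpartitioned
spark.sql("create datamap p2 on table partitionone using 'mv' " +
  "dmproperties('partitioning'='false') as " +
  "select month, sum(age) from partitionone group by month")
```

Whether each MV table ended up partitioned can be checked the way the suite does, via `CarbonEnv.getCarbonTable(...).isHivePartitionTable`.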
 
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestSQLSuite.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestSQLSuite.scala
index bc38adf..c6c7510 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestSQLSuite.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/TestSQLSuite.scala
@@ -21,7 +21,6 @@ import org.apache.spark.sql.catalyst.util._
 import org.apache.spark.sql.hive.CarbonSessionCatalogUtil
 import org.scalatest.BeforeAndAfter
 import org.apache.carbondata.mv.testutil.ModularPlanTest
-import org.apache.spark.sql.util.SparkSQLUtil
 
 class TestSQLSuite extends ModularPlanTest with BeforeAndAfter { 
   import org.apache.carbondata.mv.rewrite.matching.TestSQLBatch._
@@ -82,7 +81,7 @@ class TestSQLSuite extends ModularPlanTest with BeforeAndAfter {
         mvSession.registerSummaryDataset(summary)
         val rewrittenSQL =
           mvSession.mvSession.rewrite(mvSession.mvSession.sparkSession.sql(
-            testcase._3).queryExecution.analyzed).toCompactSQL.trim
+            testcase._3).queryExecution.optimizedPlan).toCompactSQL.trim
 
         if (!rewrittenSQL.trim.equals(testcase._4)) {
           fail(
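The same analyzed-to-optimized switch applies on the SQL-rewrite path: the matcher now consumes the optimizer's output before converting back to SQL text. Schematically, the call chain in the hunk above (the `mvSession` wrapper is test-harness internal, so this fragment only runs inside that suite):

```scala
// Schematic of the rewrite path in TestSQLSuite; `mvSession` and `testcase`
// come from the surrounding test harness.
val plan = mvSession.mvSession.sparkSession
  .sql(testcase._3)
  .queryExecution
  .optimizedPlan            // previously .analyzed
val rewrittenSQL = mvSession.mvSession.rewrite(plan).toCompactSQL.trim
```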
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/Tpcds_1_4_Suite.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/Tpcds_1_4_Suite.scala
index 3d5f168..ee6445a 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/Tpcds_1_4_Suite.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/rewrite/Tpcds_1_4_Suite.scala
@@ -55,7 +55,7 @@ class Tpcds_1_4_Suite extends ModularPlanTest with BeforeAndAfter {
 
         writer.print(s"\n\n==== ${testcase._1} ====\n\n==== mv ====\n\n${testcase._2}\n\n==== original query ====\n\n${testcase._3}\n")
         
-        val rewriteSQL = mvSession.mvSession.rewriteToSQL(mvSession.mvSession.sparkSession.sql(testcase._3).queryExecution.analyzed)
+        val rewriteSQL = mvSession.mvSession.rewriteToSQL(mvSession.mvSession.sparkSession.sql(testcase._3).queryExecution.optimizedPlan)
         LOGGER.info(s"\n\n\n\n===== Rewritten query for ${testcase._1} =====\n\n${rewriteSQL}\n")
         
         if (!rewriteSQL.trim.equals(testcase._4)) {
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesCreateDataMapCommand.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesCreateDataMapCommand.scala
index 8cf94de..375eb64 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesCreateDataMapCommand.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesCreateDataMapCommand.scala
@@ -19,7 +19,7 @@ package org.apache.carbondata.mv.timeseries
 
 import java.util.concurrent.{Callable, Executors, TimeUnit}
 
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException
@@ -27,7 +27,7 @@ import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.mv.rewrite.TestUtil
 
-class TestMVTimeSeriesCreateDataMapCommand extends CarbonQueryTest with BeforeAndAfterAll {
+class TestMVTimeSeriesCreateDataMapCommand extends QueryTest with BeforeAndAfterAll {
 
   private val timestampFormat = CarbonProperties.getInstance()
     .getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT)
@@ -36,7 +36,7 @@ class TestMVTimeSeriesCreateDataMapCommand extends CarbonQueryTest with BeforeAn
     CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
     drop()
     sql("CREATE TABLE maintable (empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, " +
-        "deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int, utilization int,salary int) STORED BY 'org.apache.carbondata.format'")
+        "deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int, utilization int,salary int) STORED AS carbondata")
     sql(s"""LOAD DATA local inpath '$resourcesPath/data_big.csv' INTO TABLE maintable  OPTIONS
          |('DELIMITER'= ',', 'QUOTECHAR'= '"')""".stripMargin)
   }
@@ -56,7 +56,7 @@ class TestMVTimeSeriesCreateDataMapCommand extends CarbonQueryTest with BeforeAn
     assert(result.get(0).get(0).toString.equalsIgnoreCase("datamap1"))
     assert(result.get(0).get(4).toString.equalsIgnoreCase("ENABLED"))
     val df = sql("select timeseries(projectjoindate,'second'), sum(projectcode) from maintable group by timeseries(projectjoindate,'second')")
-    assert(TestUtil.verifyMVDataMap(df.queryExecution.analyzed, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap1"))
     sql("drop datamap if exists datamap1")
   }
 
@@ -81,7 +81,7 @@ class TestMVTimeSeriesCreateDataMapCommand extends CarbonQueryTest with BeforeAn
   test("test mv_timeseries create datamap with date type as timeseries_column") {
     sql("drop table IF EXISTS maintable_new")
     sql("CREATE TABLE maintable_new (projectcode int, projectjoindate date, projectenddate Timestamp,attendance int) " +
-        "STORED BY 'org.apache.carbondata.format'")
+        "STORED AS carbondata")
     sql("drop datamap if exists datamap1")
     sql(
       "create datamap datamap1 on table maintable_new using 'mv' as " +
@@ -95,7 +95,7 @@ class TestMVTimeSeriesCreateDataMapCommand extends CarbonQueryTest with BeforeAn
   test("test mv_timeseries create datamap with date type as timeseries_column with incorrect granularity") {
     sql("drop table IF EXISTS maintable_new")
     sql("CREATE TABLE maintable_new (projectcode int, projectjoindate date, projectenddate Timestamp,attendance int) " +
-        "STORED BY 'org.apache.carbondata.format'")
+        "STORED AS carbondata")
     sql("drop datamap if exists datamap1")
     intercept[MalformedCarbonCommandException] {
       sql(
@@ -118,7 +118,7 @@ class TestMVTimeSeriesCreateDataMapCommand extends CarbonQueryTest with BeforeAn
   test("test mv_timeseries create datamap - Parent table name is different in Create and Select Statement") {
     sql("drop table if exists main_table")
     sql("CREATE TABLE main_table (empname String, designation String, doj Timestamp, workgroupcategory int, workgroupcategoryname String, deptno int, " +
-        "deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int, utilization int,salary int) STORED BY 'org.apache.carbondata.format'")
+        "deptname String, projectcode int, projectjoindate Timestamp, projectenddate Timestamp,attendance int, utilization int,salary int) STORED AS carbondata")
     sql("drop datamap if exists datamap1")
     intercept[MalformedCarbonCommandException] {
       sql(
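For the timeseries flavour of MV datamaps, the pattern is the same DDL with a `timeseries(column, granularity)` projection; an unrecognised granularity is rejected with MalformedCarbonCommandException, as the intercepts above show. A small sketch, again assuming a CarbonExtensions session; `'day'` is an assumed granularity for the date column here, since the visible hunks only show `'second'` against a timestamp:

```scala
// Assumes `spark` is a SparkSession configured with CarbonExtensions.
spark.sql("drop table if exists maintable_new")
spark.sql("CREATE TABLE maintable_new (projectcode int, projectjoindate date, " +
  "projectenddate Timestamp, attendance int) STORED AS carbondata")

// Roll projectjoindate up to day granularity inside the MV
spark.sql("create datamap datamap1 on table maintable_new using 'mv' as " +
  "select timeseries(projectjoindate,'day'), sum(attendance) " +
  "from maintable_new group by timeseries(projectjoindate,'day')")
```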
diff --git a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesLoadAndQuery.scala b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesLoadAndQuery.scala
index 2b57dda..3916ea3 100644
--- a/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesLoadAndQuery.scala
+++ b/datamap/mv/core/src/test/scala/org/apache/carbondata/mv/timeseries/TestMVTimeSeriesLoadAndQuery.scala
@@ -38,8 +38,7 @@ class TestMVTimeSeriesLoadAndQuery extends QueryTest with BeforeAndAfterAll {
       "select timeseries(projectjoindate,'minute'), sum(projectcode) from maintable group by timeseries(projectjoindate,'minute')")
     loadData("maintable")
     val df = sql("select timeseries(projectjoindate,'minute'), sum(projectcode) from maintable group by timeseries(projectjoindate,'minute')")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap1"))
     dropDataMap("datamap1")
     sql(
       "create datamap datamap1 on table maintable using 'mv' as " +
@@ -48,8 +47,7 @@ class TestMVTimeSeriesLoadAndQuery extends QueryTest with BeforeAndAfterAll {
     sql("select * from datamap1_table").show(false)
     val df1 = sql("select timeseries(projectjoindate,'minute'),sum(projectcode) from maintable where timeseries(projectjoindate,'minute') = '2016-02-23 09:17:00'" +
                   "group by timeseries(projectjoindate,'minute')")
-    val analyzed1 = df1.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed1, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df1.queryExecution.optimizedPlan, "datamap1"))
     dropDataMap("datamap1")
   }
 
@@ -61,8 +59,7 @@ class TestMVTimeSeriesLoadAndQuery extends QueryTest with BeforeAndAfterAll {
     loadData("maintable")
     val df = sql("select timeseries(projectjoindate,'hour'), sum(projectcode) from maintable where timeseries(projectjoindate,'hour') = '2016-02-23 09:00:00' " +
                  "group by timeseries(projectjoindate,'hour')")
-    val analyzed = df.queryExecution.analyzed
-    assert(TestUtil.verifyMVDataMap(analyzed, "datamap1"))
+    assert(TestUtil.verifyMVDataMap(df.queryExecution.optimizedPlan, "datamap1"))
     dropDataMap("datamap1")
   }
 
@@ -315,7 +312,7 @@ class TestMVTimeSeriesLoadAndQuery extends QueryTest with BeforeAndAfterAll {
     sql("drop table if exists secondtable")
     sql(
       "CREATE TABLE secondtable (empno int,empname string, projectcode int, projectjoindate " +
-      "Timestamp,salary double) STORED BY 'org.apache.carbondata.format'")
+      "Timestamp,salary double) STORED AS carbondata")
     loadData("secondtable")
     sql(
       "create datamap datamap1 using 'mv' as " +
@@ -357,7 +354,7 @@ class TestMVTimeSeriesLoadAndQuery extends QueryTest with BeforeAndAfterAll {
   def createTable(): Unit = {
     sql(
       "CREATE TABLE maintable (empno int,empname string, projectcode int, projectjoindate " +
-      "Timestamp,salary double) STORED BY 'org.apache.carbondata.format'")
+      "Timestamp,salary double) STORED AS carbondata")
   }
 
   def loadData(table: String): Unit = {
@@ -367,7 +364,7 @@ class TestMVTimeSeriesLoadAndQuery extends QueryTest with BeforeAndAfterAll {
   }
 
   def checkPlan(dataMapName: String, df: DataFrame): Unit = {
-    val analyzed = df.queryExecution.analyzed
+    val analyzed = df.queryExecution.optimizedPlan
     assert(TestUtil.verifyMVDataMap(analyzed, dataMapName))
   }
 }
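`checkPlan` above now centralises the verification pattern used throughout these suites. The body of `TestUtil.verifyMVDataMap` is outside this diff; a plausible reconstruction, assuming MV tables follow the `<datamap>_table` naming visible above (p1_table, dm1_table, ag_table):

```scala
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.datasources.LogicalRelation

// Hypothetical sketch only; the real TestUtil.verifyMVDataMap may differ.
def verifyMVDataMapSketch(plan: LogicalPlan, dataMapName: String): Boolean =
  plan.collect {
    case rel: LogicalRelation =>
      rel.catalogTable.exists(_.identifier.table.equalsIgnoreCase(s"${dataMapName}_table"))
  }.exists(identity)
```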
diff --git a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/testutil/ModularPlanTest.scala b/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/testutil/ModularPlanTest.scala
index 6ac22e7..9d4735c 100644
--- a/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/testutil/ModularPlanTest.scala
+++ b/datamap/mv/plan/src/main/scala/org/apache/carbondata/mv/testutil/ModularPlanTest.scala
@@ -21,7 +21,7 @@ import org.apache.spark.sql.catalyst.expressions._
 import org.apache.spark.sql.catalyst.expressions.aggregate.AggregateExpression
 import org.apache.spark.sql.catalyst.plans.logical._
 import org.apache.spark.sql.catalyst.util._
-import org.apache.spark.sql.test.util.CarbonQueryTest
+import org.apache.spark.sql.test.util.QueryTest
 
 import org.apache.carbondata.mv.plans.modular
 import org.apache.carbondata.mv.plans.modular.{ModularPlan, OneRowTable, Select}
@@ -30,7 +30,7 @@ import org.apache.carbondata.mv.plans.modular.Flags._
 /**
  * Provides helper methods for comparing plans.
  */
-abstract class ModularPlanTest extends CarbonQueryTest with PredicateHelper {
+abstract class ModularPlanTest extends QueryTest with PredicateHelper {
 
   /**
    * Since attribute references are given globally unique ids during analysis,
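With CarbonQueryTest gone, the MV plan suites extend the stock QueryTest utility directly. A minimal suite skeleton in the new style (the class name is illustrative; `sql`, `checkAnswer` and `Row` are used exactly as in the suites above):

```scala
import org.apache.spark.sql.Row
import org.apache.spark.sql.test.util.QueryTest
import org.scalatest.BeforeAndAfterAll

class RoundTripSketchSuite extends QueryTest with BeforeAndAfterAll {
  test("round trip through a carbon table") {
    sql("drop table if exists t")
    sql("create table t(a int, b string) STORED AS carbondata")
    sql("insert into t select 1, 'x'")
    checkAnswer(sql("select a from t"), Seq(Row(1)))
    sql("drop table if exists t")
  }
}
```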
diff --git a/datamap/mv/plan/src/test/scala/org/apache/carbondata/mv/plans/ModularToSQLSuite.scala b/datamap/mv/plan/src/test/scala/org/apache/carbondata/mv/plans/ModularToSQLSuite.scala
index 73809d6..ee58a83 100644
--- a/datamap/mv/plan/src/test/scala/org/apache/carbondata/mv/plans/ModularToSQLSuite.scala
+++ b/datamap/mv/plan/src/test/scala/org/apache/carbondata/mv/plans/ModularToSQLSuite.scala
@@ -21,7 +21,6 @@ import org.apache.spark.sql.hive.CarbonSessionCatalogUtil
 import org.scalatest.BeforeAndAfter
 import org.apache.carbondata.mv.dsl.Plans._
 import org.apache.carbondata.mv.testutil.ModularPlanTest
-import org.apache.spark.sql.util.SparkSQLUtil
 
 class ModularToSQLSuite extends ModularPlanTest with BeforeAndAfter {
 
@@ -123,8 +122,7 @@ class ModularToSQLSuite extends ModularPlanTest with BeforeAndAfter {
     testHive.udf.register("my_fun", (s: Integer) => s)
     
     testSQLBatch.foreach { query =>
-      val analyzed = testHive.sql(query).queryExecution.analyzed
-      val optimized = analyzed.optimize
+      val analyzed = testHive.sql(query).queryExecution.optimizedPlan
       val modularPlan = analyzed.optimize.modularize
 
       LOGGER.info(s"\n\n===== MODULAR PLAN =====\n\n${modularPlan.treeString} \n")
@@ -134,7 +132,7 @@ class ModularToSQLSuite extends ModularPlanTest with BeforeAndAfter {
 
       LOGGER.info(s"\n\n===== CONVERTED SQL =====\n\n$compactSql \n")
       
-      val analyzed1 = testHive.sql(convertedSql).queryExecution.analyzed
+      val analyzed1 = testHive.sql(convertedSql).queryExecution.optimizedPlan
       val modularPlan1 = analyzed1.optimize.modularize
 
       LOGGER.info(s"\n\n===== CONVERTED SQL =====\n\n$compactSql \n")
diff --git a/datamap/mv/plan/src/test/scala/org/apache/carbondata/mv/plans/SignatureSuite.scala b/datamap/mv/plan/src/test/scala/org/apache/carbondata/mv/plans/SignatureSuite.scala
index 9d0548f..138210d 100644
--- a/datamap/mv/plan/src/test/scala/org/apache/carbondata/mv/plans/SignatureSuite.scala
+++ b/datamap/mv/plan/src/test/scala/org/apache/carbondata/mv/plans/SignatureSuite.scala
@@ -23,7 +23,6 @@ import org.scalatest.BeforeAndAfterAll
 import org.apache.carbondata.mv.dsl.Plans._
 import org.apache.carbondata.mv.plans.modular.ModularPlanSignatureGenerator
 import org.apache.carbondata.mv.testutil.ModularPlanTest
-import org.apache.spark.sql.util.SparkSQLUtil
 
 class SignatureSuite extends ModularPlanTest with BeforeAndAfterAll {
   import org.apache.carbondata.mv.testutil.TestSQLBatch._
@@ -59,7 +58,7 @@ class SignatureSuite extends ModularPlanTest with BeforeAndAfterAll {
         )   
         
     testSQLBatch.foreach { query =>
-      val analyzed = testHive.sql(query).queryExecution.analyzed
+      val analyzed = testHive.sql(query).queryExecution.optimizedPlan
       val modularPlan = analyzed.optimize.modularize
       val sig = ModularPlanSignatureGenerator.generate(modularPlan)
       sig match {
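Both plan suites now feed the optimizer's output into the modular-plan DSL before comparing signatures or SQL round trips. The pipeline, as assembled from the hunks above (`optimize` and `modularize` come from `org.apache.carbondata.mv.dsl.Plans._`; `testHive` and `query` belong to the suites):

```scala
import org.apache.carbondata.mv.dsl.Plans._
import org.apache.carbondata.mv.plans.modular.ModularPlanSignatureGenerator

// Logical plan -> modular plan -> signature, inside the suites above
val optimized   = testHive.sql(query).queryExecution.optimizedPlan
val modularPlan = optimized.optimize.modularize
val signature   = ModularPlanSignatureGenerator.generate(modularPlan)
```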
diff --git a/docs/datamap/mv-datamap-guide.md b/docs/datamap/mv-datamap-guide.md
index 5298a27..1e5a0bc 100644
--- a/docs/datamap/mv-datamap-guide.md
+++ b/docs/datamap/mv-datamap-guide.md
@@ -29,7 +29,7 @@
 
 Start spark-sql in terminal and run the following queries,
 ```
-CREATE TABLE maintable(a int, b string, c int) stored by 'carbondata';
+CREATE TABLE maintable(a int, b string, c int) stored as carbondata;
 insert into maintable select 1, 'ab', 2;
 CREATE DATAMAP datamap_1 on table maintable as SELECT a, sum(b) from maintable group by a;
 SELECT a, sum(b) from maintable group by a;
diff --git a/docs/ddl-of-carbondata.md b/docs/ddl-of-carbondata.md
index fcae480..7171ff3 100644
--- a/docs/ddl-of-carbondata.md
+++ b/docs/ddl-of-carbondata.md
@@ -337,13 +337,13 @@ CarbonData DDL statements are documented here,which includes:
       Syntax:
 
       ```
-      CREATE TABLE [dbName].tableName (col1 String, col2 String, col3 int,…) STORED BY 'carbondata' TBLPROPERTIES ('COLUMN_META_CACHE'='col1,col2,…')
+      CREATE TABLE [dbName].tableName (col1 String, col2 String, col3 int,…) STORED AS carbondata TBLPROPERTIES ('COLUMN_META_CACHE'='col1,col2,…')
       ```
 
       Example:
 
       ```
-      CREATE TABLE employee (name String, city String, id int) STORED BY 'carbondata' TBLPROPERTIES ('COLUMN_META_CACHE'='name')
+      CREATE TABLE employee (name String, city String, id int) STORED AS carbondata TBLPROPERTIES ('COLUMN_META_CACHE'='name')
       ```
 
       After creation of table or on already created tables use the alter table command to configure the columns to be cached.
@@ -384,13 +384,13 @@ CarbonData DDL statements are documented here,which includes:
       Syntax:
 
       ```
-      CREATE TABLE [dbName].tableName (col1 String, col2 String, col3 int,…) STORED BY 'carbondata' TBLPROPERTIES ('CACHE_LEVEL'='Blocklet')
+      CREATE TABLE [dbName].tableName (col1 String, col2 String, col3 int,…) STORED AS carbondata TBLPROPERTIES ('CACHE_LEVEL'='Blocklet')
       ```
 
       Example:
 
       ```
-      CREATE TABLE employee (name String, city String, id int) STORED BY 'carbondata' TBLPROPERTIES ('CACHE_LEVEL'='Blocklet')
+      CREATE TABLE employee (name String, city String, id int) STORED AS carbondata TBLPROPERTIES ('CACHE_LEVEL'='Blocklet')
       ```
 
       After creation of table or on already created tables use the alter table command to configure the cache level.
@@ -418,7 +418,7 @@ CarbonData DDL statements are documented here,which includes:
 
        Example:
        ```
-       CREATE TABLE employee (name String, city String, id int) STORED BY 'carbondata' TBLPROPERTIES ('flat_folder'='true')
+       CREATE TABLE employee (name String, city String, id int) STORED AS carbondata TBLPROPERTIES ('flat_folder'='true')
        ```
 
    - ##### String longer than 32000 characters
@@ -875,7 +875,7 @@ Users can specify which columns to include and exclude for local dictionary gene
     [(col_name data_type , ...)]
     [COMMENT table_comment]
     [PARTITIONED BY (col_name data_type , ...)]
-    [STORED BY file_format]
+    [STORED AS file_format]
     [TBLPROPERTIES (property_name=property_value, ...)]
   ```
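The DDL-guide hunks above are the same syntax swap throughout; the table properties themselves are unchanged. For reference, the guide's snippets combine into one statement under the new syntax (the properties are independent and merely shown together here):

```scala
// Assumes `spark` is a SparkSession configured with CarbonExtensions.
spark.sql(
  """CREATE TABLE employee (name String, city String, id int)
    |STORED AS carbondata
    |TBLPROPERTIES (
    |  'COLUMN_META_CACHE'='name',
    |  'CACHE_LEVEL'='Blocklet',
    |  'flat_folder'='true')""".stripMargin)
```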
 
diff --git a/docs/hive-guide.md b/docs/hive-guide.md
index 7aba3bf..1941168 100644
--- a/docs/hive-guide.md
+++ b/docs/hive-guide.md
@@ -59,7 +59,7 @@ val metaStoreDB = s"$rootPath/metastore_db"
 
 val carbon = SparkSession.builder().enableHiveSupport().config("spark.sql.warehouse.dir", warehouse).config(org.apache.carbondata.core.constants.CarbonCommonConstants.STORE_LOCATION, storeLocation).getOrCreateCarbonSession(storeLocation, metaStoreDB)
 
-carbon.sql("create table hive_carbon(id int, name string, scale decimal, country string, salary double) STORED BY 'carbondata'")
+carbon.sql("create table hive_carbon(id int, name string, scale decimal, country string, salary double) STORED AS carbondata")
 carbon.sql("LOAD DATA INPATH '<hdfs store path>/sample.csv' INTO TABLE hive_carbon")
 scala>carbon.sql("SELECT * FROM hive_carbon").show()
 ```
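The hive guide still builds a CarbonSession, while the examples elsewhere in this commit move to a plain SparkSession plus the Carbon extension. Under that newer pattern, the equivalent session for the snippet above would look roughly like this (store-location handling is omitted and may still be required):

```scala
import org.apache.spark.sql.SparkSession

// Extension-based alternative to getOrCreateCarbonSession; `warehouse` as
// defined earlier in the guide.
val carbon = SparkSession.builder()
  .enableHiveSupport()
  .config("spark.sql.warehouse.dir", warehouse)
  .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
  .getOrCreate()
```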
diff --git "a/docs/zh_cn/CarbonData\345\205\270\345\236\213\345\272\224\347\224\250\345\234\272\346\231\257\344\271\213\346\230\216\347\273\206\346\225\260\346\215\256\346\237\245\350\257\242\357\274\232\347\202\271\346\237\245+\350\277\207\346\273\244\346\235\241\344\273\266.md" "b/docs/zh_cn/CarbonData\345\205\270\345\236\213\345\272\224\347\224\250\345\234\272\346\231\257\344\271\213\346\230\216\347\273\206\346\225\260\346\215\256\346\237\245\350\257\242\357\274\232\347\202\271\346\237 [...]
index 191326f..6e399bd 100644
--- "a/docs/zh_cn/CarbonData\345\205\270\345\236\213\345\272\224\347\224\250\345\234\272\346\231\257\344\271\213\346\230\216\347\273\206\346\225\260\346\215\256\346\237\245\350\257\242\357\274\232\347\202\271\346\237\245+\350\277\207\346\273\244\346\235\241\344\273\266.md"
+++ "b/docs/zh_cn/CarbonData\345\205\270\345\236\213\345\272\224\347\224\250\345\234\272\346\231\257\344\271\213\346\230\216\347\273\206\346\225\260\346\215\256\346\237\245\350\257\242\357\274\232\347\202\271\346\237\245+\350\277\207\346\273\244\346\235\241\344\273\266.md"
@@ -404,7 +404,7 @@ create table IF NOT EXISTS example_table
 `REQ_TIME_SEC`    BIGINT,
 `REQ_SUCCED_FLAG` INT
 .....
-) STORED BY 'org.apache.carbondata.format' 
+) STORED AS carbondata 
 TBLPROPERTIES ( 'LOCAL_DICTIONARY_ENABLE'='false','SORT_COLUMNS'='msisdn,req_time_sec,req_succed_flag', 'SORT_SCOPE'='LOCAL_SORT' )
 ```
 
diff --git a/examples/spark2/src/main/java/org/apache/carbondata/examples/sql/JavaCarbonSessionExample.java b/examples/spark2/src/main/java/org/apache/carbondata/examples/sql/JavaCarbonSessionExample.java
index 7fe8afe..de0a250 100644
--- a/examples/spark2/src/main/java/org/apache/carbondata/examples/sql/JavaCarbonSessionExample.java
+++ b/examples/spark2/src/main/java/org/apache/carbondata/examples/sql/JavaCarbonSessionExample.java
@@ -22,8 +22,9 @@ import java.io.IOException;
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.util.CarbonProperties;
+import org.apache.carbondata.examples.util.ExampleUtils;
 
-import org.apache.spark.sql.CarbonEnv;
+import org.apache.spark.sql.CarbonSession;
 import org.apache.spark.sql.SparkSession;
 
 public class JavaCarbonSessionExample {
@@ -44,8 +45,6 @@ public class JavaCarbonSessionExample {
 
     SparkSession carbon = builder.getOrCreate();
 
-    CarbonEnv.getInstance(carbon);
-
     exampleBody(carbon);
     carbon.close();
   }
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala b/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala
index 677bbb8..4e69c5a 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala
@@ -21,7 +21,8 @@ import java.io.File
 import java.text.SimpleDateFormat
 import java.util.Date
 
-import org.apache.spark.sql.{CarbonEnv, DataFrame, Row, SaveMode, SparkSession}
+import org.apache.spark.sql.{DataFrame, Row, SaveMode, SparkSession}
+import org.apache.spark.sql.types._
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.{CarbonProperties, CarbonUtil}
@@ -308,7 +309,7 @@ object SimpleQueryBenchmark {
         .addProperty("enable.unsafe.sort", "true")
         .addProperty("carbon.blockletgroup.size.in.mb", "32")
         .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE, "true")
-
+    import org.apache.spark.sql.CarbonSession._
     val rootPath = new File(this.getClass.getResource("/").getPath
         + "../../../..").getCanonicalPath
     val storeLocation = s"$rootPath/examples/spark2/target/store"
@@ -323,7 +324,6 @@ object SimpleQueryBenchmark {
         .config("spark.driver.host", "127.0.0.1")
         .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
         .getOrCreate()
-    CarbonEnv.getInstance(spark)
     spark.sparkContext.setLogLevel("warn")
 
     val table1 = parquetTableName
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
index a52a016..836e6fa 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
@@ -38,8 +38,7 @@ import org.apache.carbondata.examples.util.ExampleUtils
  */
 object AlluxioExample {
   def main (args: Array[String]) {
-    val carbon = ExampleUtils.createCarbonSession("AlluxioExample",
-      storePath = "alluxio://localhost:19998/carbondata")
+    val carbon = ExampleUtils.createSparkSession("AlluxioExample")
     val runShell: Boolean = if (null != args && args.length > 0) {
       args(0).toBoolean
     } else {
@@ -88,7 +87,7 @@ object AlluxioExample {
          |    phonetype String,
          |    serialname String,
          |    salary Int)
-         | STORED BY 'carbondata'
+         | STORED AS carbondata
          | TBLPROPERTIES(
          |    'SORT_COLUMNS' = 'phonetype,name',
          |    'TABLE_BLOCKSIZE'='32',
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlterTableExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlterTableExample.scala
index d2b4c95..1e5ba83 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlterTableExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlterTableExample.scala
@@ -30,7 +30,7 @@ object AlterTableExample {
 
   def main(args: Array[String]): Unit = {
 
-    val spark = ExampleUtils.createCarbonSession("AlterTableExample")
+    val spark = ExampleUtils.createSparkSession("AlterTableExample")
     exampleBody(spark)
     spark.close()
   }
@@ -54,7 +54,7 @@ object AlterTableExample {
          | floatField FLOAT,
          | complexData ARRAY<STRING>
          | )
-         | STORED BY 'carbondata'
+         | STORED AS carbondata
        """.stripMargin)
 
     // Alter table change data type
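Every example's main method now goes through `ExampleUtils.createSparkSession`. Its body is not shown in this part of the diff; a hypothetical reconstruction, assuming it wraps the extension-based builder used in SimpleQueryBenchmark above (a worker-count argument is used by DataFrameComplexTypeExample later in this diff):

```scala
import org.apache.spark.sql.SparkSession

// Hypothetical sketch of ExampleUtils.createSparkSession; the real helper
// lives in org.apache.carbondata.examples.util and may differ.
def createSparkSession(appName: String, workThreadNum: Int = 1): SparkSession =
  SparkSession.builder()
    .master(s"local[$workThreadNum]")
    .appName(appName)
    .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
    .getOrCreate()
```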
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala
index 2abd151..8a74732 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonDataFrameExample.scala
@@ -24,7 +24,7 @@ import org.apache.carbondata.examples.util.ExampleUtils
 object CarbonDataFrameExample {
 
   def main(args: Array[String]) {
-    val spark = ExampleUtils.createCarbonSession("CarbonDataFrameExample")
+    val spark = ExampleUtils.createSparkSession("CarbonDataFrameExample")
     exampleBody(spark)
     spark.close()
   }
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
index b6921f2..d5c1188 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
@@ -37,9 +37,9 @@ object CarbonSessionExample {
       s"$rootPath/examples/spark2/src/main/resources/log4j.properties")
 
     CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "true")
+      .addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "false")
     val spark = ExampleUtils.createCarbonSession("CarbonSessionExample")
-    spark.sparkContext.setLogLevel("INFO")
+    spark.sparkContext.setLogLevel("error")
     exampleBody(spark)
     spark.close()
   }
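CarbonSessionExample now disables query statistics and quiets the logs. The property API involved, exactly as used in the hunk above:

```scala
import org.apache.carbondata.core.constants.CarbonCommonConstants
import org.apache.carbondata.core.util.CarbonProperties

// "false" is the value the example now sets; flip to "true" to collect statistics
CarbonProperties.getInstance()
  .addProperty(CarbonCommonConstants.ENABLE_QUERY_STATISTICS, "false")
```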
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala
index adcce68..a521bdc 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSortColumnsExample.scala
@@ -28,7 +28,7 @@ import org.apache.carbondata.examples.util.ExampleUtils
 object CarbonSortColumnsExample {
 
   def main(args: Array[String]) {
-    val spark = ExampleUtils.createCarbonSession("CarbonSessionExample")
+    val spark = ExampleUtils.createSparkSession("CarbonSessionExample")
     exampleBody(spark)
     spark.close()
   }
@@ -58,7 +58,7 @@ object CarbonSortColumnsExample {
          | floatField FLOAT,
          | complexData ARRAY<STRING>
          | )
-         | STORED BY 'carbondata'
+         | STORED AS carbondata
          | TBLPROPERTIES('SORT_COLUMNS'='')
        """.stripMargin)
 
@@ -81,7 +81,7 @@ object CarbonSortColumnsExample {
          | floatField FLOAT,
          | complexData ARRAY<STRING>
          | )
-         | STORED BY 'carbondata'
+         | STORED AS carbondata
          | TBLPROPERTIES('SORT_COLUMNS'='intField, stringField, charField')
        """.stripMargin)
 
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
index 8bf391f..2ddb8ac 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
@@ -27,7 +27,7 @@ case class People(name: String, occupation: String, id: Int)
 object CaseClassDataFrameAPIExample {
 
   def main(args: Array[String]) {
-    val spark = ExampleUtils.createCarbonSession("CaseClassDataFrameAPIExample")
+    val spark = ExampleUtils.createSparkSession("CaseClassDataFrameAPIExample")
     exampleBody(spark)
     spark.close()
   }
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CustomCompactionExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CustomCompactionExample.scala
index fcd5949..cfa2766 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CustomCompactionExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CustomCompactionExample.scala
@@ -33,7 +33,7 @@ import org.apache.carbondata.examples.util.ExampleUtils
 object CustomCompactionExample {
 
   def main(args: Array[String]): Unit = {
-    val spark = ExampleUtils.createCarbonSession("CustomCompactionExample")
+    val spark = ExampleUtils.createSparkSession("CustomCompactionExample")
     exampleBody(spark)
     spark.close()
   }
@@ -47,15 +47,16 @@ object CustomCompactionExample {
     spark.sql(
       s"""
          | CREATE TABLE IF NOT EXISTS custom_compaction_table(
-         | ID Int,
-         | date Date,
-         | country String,
-         | name String,
-         | phonetype String,
-         | serialname String,
-         | salary Int,
-         | floatField float
-         | ) STORED BY 'carbondata'
+         |   ID Int,
+         |   date Date,
+         |   country String,
+         |   name String,
+         |   phonetype String,
+         |   serialname String,
+         |   salary Int,
+         |   floatField float
+         | )
+         | STORED AS carbondata
        """.stripMargin)
 
     val rootPath = new File(this.getClass.getResource("/").getPath
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataFrameComplexTypeExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataFrameComplexTypeExample.scala
index 663e247..5547d1a 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataFrameComplexTypeExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataFrameComplexTypeExample.scala
@@ -40,7 +40,7 @@ object DataFrameComplexTypeExample {
 
   def main(args: Array[String]) {
 
-    val spark = ExampleUtils.createCarbonSession("DataFrameComplexTypeExample", 4)
+    val spark = ExampleUtils.createSparkSession("DataFrameComplexTypeExample", 4)
     exampleBody(spark)
     spark.close()
   }
@@ -64,7 +64,7 @@ object DataFrameComplexTypeExample {
          | salary FLOAT,
          | file array<string>
          | )
-         | STORED BY 'carbondata'
+         | STORED AS carbondata
          | TBLPROPERTIES(
          | 'sort_columns'='name')
          | """.stripMargin)
@@ -79,7 +79,7 @@ object DataFrameComplexTypeExample {
          | salary FLOAT,
          | file struct<school:array<string>, school1:array<string>, age:int>
          | )
-         | STORED BY 'carbondata'
+         | STORED AS carbondata
          | TBLPROPERTIES(
          | 'sort_columns'='name')
          | """.stripMargin)
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
index fde66e1..814a553 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataManagementExample.scala
@@ -27,7 +27,7 @@ import org.apache.carbondata.examples.util.ExampleUtils
 object DataManagementExample {
 
   def main(args: Array[String]) {
-    val spark = ExampleUtils.createCarbonSession("DataManagementExample")
+    val spark = ExampleUtils.createSparkSession("DataManagementExample")
     exampleBody(spark)
     spark.close()
   }
@@ -39,15 +39,16 @@ object DataManagementExample {
     spark.sql(
       s"""
          | CREATE TABLE IF NOT EXISTS datamanagement_table(
-         | ID Int,
-         | date Date,
-         | country String,
-         | name String,
-         | phonetype String,
-         | serialname String,
-         | salary Int,
-         | floatField float
-         | ) STORED BY 'carbondata'
+         |   ID Int,
+         |   date Date,
+         |   country String,
+         |   name String,
+         |   phonetype String,
+         |   serialname String,
+         |   salary Int,
+         |   floatField float
+         | )
+         | STORED AS carbondata
        """.stripMargin)
 
     val rootPath = new File(this.getClass.getResource("/").getPath
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataUpdateDeleteExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataUpdateDeleteExample.scala
index 8af1d3e..f72cb42 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataUpdateDeleteExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataUpdateDeleteExample.scala
@@ -31,7 +31,7 @@ object DataUpdateDeleteExample {
 
   def main(args: Array[String]) {
 
-    val spark = ExampleUtils.createCarbonSession("DataUpdateDeleteExample")
+    val spark = ExampleUtils.createSparkSession("DataUpdateDeleteExample")
     exampleBody(spark)
     spark.close()
   }
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
index 120ab85..1b0145a 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/DirectSQLExample.scala
@@ -35,7 +35,7 @@ import org.apache.carbondata.sdk.file.{CarbonWriter, Field, Schema}
 object DirectSQLExample {
 
   def main(args: Array[String]) {
-    val carbonSession = ExampleUtils.createCarbonSession("DirectSQLExample")
+    val carbonSession = ExampleUtils.createSparkSession("DirectSQLExample")
     exampleBody(carbonSession)
     carbonSession.close()
   }
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/ExternalTableExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/ExternalTableExample.scala
index 9d5ee8e..2fb62b3 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/ExternalTableExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/ExternalTableExample.scala
@@ -33,7 +33,7 @@ import org.apache.carbondata.examples.util.ExampleUtils
 object ExternalTableExample {
 
   def main(args: Array[String]) {
-    val spark = ExampleUtils.createCarbonSession("ExternalTableExample")
+    val spark = ExampleUtils.createSparkSession("ExternalTableExample")
     exampleBody(spark)
     spark.close()
   }
@@ -48,18 +48,18 @@ object ExternalTableExample {
     spark.sql(
       s"""
          | CREATE TABLE origin_table(
-         | shortField SHORT,
-         | intField INT,
-         | bigintField LONG,
-         | doubleField DOUBLE,
-         | stringField STRING,
-         | timestampField TIMESTAMP,
-         | decimalField DECIMAL(18,2),
-         | dateField DATE,
-         | charField CHAR(5),
-         | floatField FLOAT
+         |   shortField SHORT,
+         |   intField INT,
+         |   bigintField LONG,
+         |   doubleField DOUBLE,
+         |   stringField STRING,
+         |   timestampField TIMESTAMP,
+         |   decimalField DECIMAL(18,2),
+         |   dateField DATE,
+         |   charField CHAR(5),
+         |   floatField FLOAT
          | )
-         | STORED BY 'carbondata'
+         | STORED AS carbondata
        """.stripMargin)
 
     val rootPath = new File(this.getClass.getResource("/").getPath
@@ -83,7 +83,7 @@ object ExternalTableExample {
 
     // Create external_table
     spark.sql("DROP TABLE IF EXISTS external_table")
-    spark.sql("CREATE EXTERNAL TABLE external_table STORED BY 'carbondata'" +
+    spark.sql("CREATE EXTERNAL TABLE external_table STORED AS carbondata" +
               s" LOCATION '$origin_table_path'")
     spark.sql("SELECT count(*) FROM external_table").show()
 
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
index 7438638..5b2332c 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
@@ -30,7 +30,7 @@ import org.apache.carbondata.hadoop.CarbonProjection
 object HadoopFileExample {
 
   def main(args: Array[String]): Unit = {
-    val spark = ExampleUtils.createCarbonSession("HadoopFileExample")
+    val spark = ExampleUtils.createSparkSession("HadoopFileExample")
     val rootPath = new File(this.getClass.getResource("/").getPath
                             + "../../../..").getCanonicalPath
     val storeLocation: String = rootPath + "/examples/spark2/target/store/default"
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/HiveExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/HiveExample.scala
index 91448e7..7f0a23b 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/HiveExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/HiveExample.scala
@@ -21,6 +21,7 @@ import java.sql.{DriverManager, ResultSet, Statement}
 
 import org.apache.hadoop.fs.Path
 import org.apache.hadoop.fs.permission.{FsAction, FsPermission}
+import org.apache.spark.sql.SparkSession
 
 import org.apache.carbondata.common.logging.LogServiceFactory
 import org.apache.carbondata.core.datastore.impl.FileFactory
@@ -31,79 +32,74 @@ import org.apache.carbondata.hive.test.server.HiveEmbeddedServer2
 object HiveExample {
 
   private val driverName: String = "org.apache.hive.jdbc.HiveDriver"
-
   val rootPath = new File(this.getClass.getResource("/").getPath
                           + "../../../..").getCanonicalPath
-  private val targetLoc = s"$rootPath/examples/spark2/target"
+  val targetLoc = s"$rootPath/examples/spark2/target"
+  System.setProperty("derby.system.home", targetLoc)
   val metaStoreLoc = s"$targetLoc/metastore_db"
-  val storeLocation = s"$targetLoc/store"
   val logger = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
 
-
   def main(args: Array[String]) {
-    createCarbonTable(storeLocation)
+    val sparkSession = ExampleUtils.createSparkSession("HiveExample")
+    createCarbonTable(sparkSession)
     readFromHive
     System.exit(0)
   }
 
-  def createCarbonTable(store: String): Unit = {
+  def createCarbonTable(sparkSession: SparkSession): Unit = {
 
-    val carbonSession = ExampleUtils.createCarbonSession("HiveExample")
+    sparkSession.sql("""DROP TABLE IF EXISTS HIVE_CARBON_EXAMPLE""".stripMargin)
 
-    carbonSession.sql("""DROP TABLE IF EXISTS HIVE_CARBON_EXAMPLE""".stripMargin)
-
-    carbonSession.sql(
+    sparkSession.sql(
       s"""
          | CREATE TABLE HIVE_CARBON_EXAMPLE
          | (ID int,NAME string,SALARY double)
-         | STORED BY 'carbondata'
+         | STORED AS carbondata
        """.stripMargin)
 
     var inputPath = FileFactory
       .getUpdatedFilePath(s"$rootPath/examples/spark2/src/main/resources/sample.csv")
 
-    carbonSession.sql(
+    sparkSession.sql(
       s"""
          | LOAD DATA LOCAL INPATH '$inputPath'
          | INTO TABLE HIVE_CARBON_EXAMPLE
        """.stripMargin)
 
-    carbonSession.sql(
+    sparkSession.sql(
       s"""
          | LOAD DATA LOCAL INPATH '$inputPath'
          | INTO TABLE HIVE_CARBON_EXAMPLE
        """.stripMargin)
 
-    carbonSession.sql("SELECT * FROM HIVE_CARBON_EXAMPLE").show()
+    sparkSession.sql("SELECT * FROM HIVE_CARBON_EXAMPLE").show()
 
-    carbonSession.sql("DROP TABLE IF EXISTS TEST_BOUNDARY")
+    sparkSession.sql("DROP TABLE IF EXISTS TEST_BOUNDARY")
 
-    carbonSession
+    sparkSession
       .sql(
         s"""CREATE TABLE TEST_BOUNDARY (c1_int int,c2_Bigint Bigint,c3_Decimal Decimal(38,30),
            |c4_double double,c5_string string,c6_Timestamp Timestamp,c7_Datatype_Desc string)
-           |STORED BY 'org.apache.carbondata.format' """.stripMargin)
+           |STORED AS carbondata""".stripMargin)
 
     inputPath = FileFactory
       .getUpdatedFilePath(s"$rootPath/examples/spark2/src/main/resources/Test_Data1.csv")
 
-    carbonSession
+    sparkSession
       .sql(
         s"LOAD DATA INPATH '$inputPath' INTO table TEST_BOUNDARY OPTIONS('DELIMITER'=','," +
         "'QUOTECHAR'='\"', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='c1_int,c2_Bigint," +
         "c3_Decimal,c4_double,c5_string,c6_Timestamp,c7_Datatype_Desc')")
 
-    carbonSession.sql("""DROP TABLE IF EXISTS complexMap""".stripMargin)
+    sparkSession.sql("""DROP TABLE IF EXISTS complexMap""".stripMargin)
 
-    carbonSession.sql("create table complexMap(name map<string,string>) stored by 'carbondata'")
+    sparkSession.sql("create table complexMap(name map<string,string>) STORED AS carbondata")
 
-    carbonSession
+    sparkSession
       .sql(
         "insert into complexMap values(map('Manish','Nalla','Shardul','Singh','Vishal','Kumar'," +
         "'EmptyVal','','NullVal', 'null'))")
 
-    carbonSession.close()
-
     // delete the already existing lock on metastore so that new derby instance
     // for HiveServer can run on the same metastore
     checkAndDeleteDBLock
@@ -205,7 +201,7 @@ object HiveExample {
       individualColRowsFetched = individualColRowsFetched + 1
     }
     println(" ********** Total Rows Fetched When Quering The Individual Columns **********" +
-      s"$individualColRowsFetched")
+            s"$individualColRowsFetched")
     assert(individualColRowsFetched == 4)
 
     logger.info("Fetching the Out Of Order Columns ")
@@ -239,7 +235,7 @@ object HiveExample {
       outOfOrderColFetched = outOfOrderColFetched + 1
     }
     println(" ********** Total Rows Fetched When Quering The Out Of Order Columns **********" +
-      s"$outOfOrderColFetched")
+            s"$outOfOrderColFetched")
     assert(outOfOrderColFetched == 4)
 
     val resultAggQuery = statement
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala
index fe94f54..bcc16ca 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/LuceneDataMapExample.scala
@@ -29,7 +29,7 @@ import org.apache.carbondata.examples.util.ExampleUtils
 object LuceneDataMapExample {
 
   def main(args: Array[String]) {
-    val spark = ExampleUtils.createCarbonSession("LuceneDataMapExample")
+    val spark = ExampleUtils.createSparkSession("LuceneDataMapExample")
     exampleBody(spark)
     spark.close()
   }
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/MVDataMapExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/MVDataMapExample.scala
index 2600ab9..56fb871 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/MVDataMapExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/MVDataMapExample.scala
@@ -31,7 +31,7 @@ import org.apache.carbondata.examples.util.ExampleUtils
 object MVDataMapExample {
 
   def main(args: Array[String]) {
-    val spark = ExampleUtils.createCarbonSession("MVDataMapExample")
+    val spark = ExampleUtils.createSparkSession("MVDataMapExample")
     exampleBody(spark)
     performanceTest(spark)
     spark.close()
@@ -51,7 +51,7 @@ object MVDataMapExample {
         | name String,
         | city String,
         | age Int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
 
     spark.sql(
@@ -59,7 +59,7 @@ object MVDataMapExample {
         | CREATE TABLE dimtable
         | (name String,
         | address String)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
 
     spark.sql(s"""LOAD DATA LOCAL INPATH '$testData' into table mainTable""")
@@ -137,7 +137,7 @@ object MVDataMapExample {
         | CREATE TABLE emp_address
         | (name String,
         | address String)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
 
     spark.sql(
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/QuerySegmentExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/QuerySegmentExample.scala
index 5719926..fdcd029 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/QuerySegmentExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/QuerySegmentExample.scala
@@ -33,7 +33,7 @@ import org.apache.carbondata.examples.util.ExampleUtils
 object QuerySegmentExample {
 
   def main(args: Array[String]) {
-    val spark = ExampleUtils.createCarbonSession("QuerySegmentExample")
+    val spark = ExampleUtils.createSparkSession("QuerySegmentExample")
     exampleBody(spark)
     spark.close()
   }
@@ -58,7 +58,7 @@ object QuerySegmentExample {
          | charField CHAR(5),
          | floatField FLOAT
          | )
-         | STORED BY 'carbondata'
+         | STORED AS carbondata
        """.stripMargin)
 
     val rootPath = new File(this.getClass.getResource("/").getPath
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3CsvExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3CsvExample.scala
index a43c1ff..64a836e 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3CsvExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3CsvExample.scala
@@ -19,7 +19,7 @@ package org.apache.carbondata.examples
 import java.io.File
 
 import org.apache.hadoop.fs.s3a.Constants.{ACCESS_KEY, SECRET_KEY}
-import org.apache.spark.sql.{CarbonEnv, SparkSession}
+import org.apache.spark.sql.SparkSession
 import org.slf4j.{Logger, LoggerFactory}
 
 object S3CsvExample {
@@ -42,8 +42,7 @@ object S3CsvExample {
       System.exit(0)
     }
 
-    val spark =
-      SparkSession
+    val spark = SparkSession
       .builder()
       .master(args(3))
       .appName("S3CsvExample")
@@ -53,8 +52,6 @@ object S3CsvExample {
       .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
       .getOrCreate()
 
-    CarbonEnv.getInstance(spark)
-
     spark.sparkContext.setLogLevel("INFO")
 
     spark.sql(
@@ -71,7 +68,7 @@ object S3CsvExample {
          | charField CHAR(5),
          | floatField FLOAT
          | )
-         | STORED BY 'carbondata'
+         | STORED AS carbondata
          | LOCATION '$rootPath/examples/spark2/target/store'
        """.stripMargin)
 
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3Example.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3Example.scala
index 0d22198..8774236 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3Example.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3Example.scala
@@ -18,7 +18,7 @@ package org.apache.carbondata.examples
 
 import java.io.File
 
-import org.apache.spark.sql.{CarbonEnv, Row, SparkSession}
+import org.apache.spark.sql.{Row, SparkSession}
 import org.slf4j.{Logger, LoggerFactory}
 
 import org.apache.carbondata.spark.util.CarbonSparkUtil
@@ -60,8 +60,6 @@ object S3Example {
       .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
       .getOrCreate()
 
-    CarbonEnv.getInstance(spark)
-
     spark.sparkContext.setLogLevel("WARN")
 
     spark.sql("Drop table if exists carbon_table")
@@ -79,7 +77,7 @@ object S3Example {
          | charField CHAR(5),
          | floatField FLOAT
          | )
-         | STORED BY 'carbondata'
+         | STORED AS carbondata
          | LOCATION '${args(2)}'
        """.stripMargin)
 
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
index 34eca3b..d3b67bc 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3UsingSDkExample.scala
@@ -17,7 +17,7 @@
 package org.apache.carbondata.examples
 
 import org.apache.hadoop.fs.s3a.Constants.{ACCESS_KEY, ENDPOINT, SECRET_KEY}
-import org.apache.spark.sql.{CarbonEnv, SparkSession}
+import org.apache.spark.sql.SparkSession
 import org.slf4j.{Logger, LoggerFactory}
 
 import org.apache.carbondata.core.metadata.datatype.DataTypes
@@ -100,8 +100,6 @@ object S3UsingSdkExample {
       .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
       .getOrCreate()
 
-    CarbonEnv.getInstance(spark)
-
     spark.sparkContext.setLogLevel("WARN")
     val path = if (args.length < 3) {
       "s3a://sdk/WriterOutput2 "
@@ -116,7 +114,7 @@ object S3UsingSdkExample {
     buildTestData(args, path, num)
 
     spark.sql("DROP TABLE IF EXISTS s3_sdk_table")
-    spark.sql(s"CREATE EXTERNAL TABLE s3_sdk_table STORED BY 'carbondata'" +
+    spark.sql(s"CREATE EXTERNAL TABLE s3_sdk_table STORED AS carbondata" +
       s" LOCATION '$path'")
     spark.sql("SELECT * FROM s3_sdk_table LIMIT 10").show()
     spark.stop()
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
index 27a0134..9651471 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkSessionExample.scala
@@ -24,48 +24,54 @@ import org.apache.spark.sql.SparkSession
 
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.examples.util.ExampleUtils
 
 /**
  * This example doesn't create a CarbonSession, but uses CarbonSource when creating tables
  */
 
 object SparkSessionExample {
-
+  val rootPath = new File(this.getClass.getResource("/").getPath
+                          + "../../../..").getCanonicalPath
   def main(args: Array[String]): Unit = {
-    val rootPath = new File(this.getClass.getResource("/").getPath
-                            + "../../../..").getCanonicalPath
-    val storeLocation = s"$rootPath/examples/spark2/target/store"
-    val warehouse = s"$rootPath/examples/spark2/target/warehouse"
-    val metaStoreDB = s"$rootPath/examples/spark2/target/metastore_db"
-
-    // clean data folder
-    if (true) {
-      val clean = (path: String) => FileUtils.deleteDirectory(new File(path))
-      clean(storeLocation)
-      clean(warehouse)
-      clean(metaStoreDB)
-    }
-
-    val sparksession = SparkSession
-      .builder()
-      .master("local")
-      .appName("SparkSessionExample")
-      .enableHiveSupport()
-      .config("spark.sql.warehouse.dir", warehouse)
-      .config("javax.jdo.option.ConnectionURL",
-        s"jdbc:derby:;databaseName=$metaStoreDB;create=true")
-      .getOrCreate()
-
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd HH:mm:ss")
-      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
-      .addProperty("carbon.storelocation", storeLocation)
-
-    sparksession.sparkContext.setLogLevel("ERROR")
+    val sparkSession = ExampleUtils.createSparkSession("SparkSessionExample")
+    val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
+    sparkSession.sql("DROP TABLE IF EXISTS csv_table")
+    sparkSession.sql(
+      s"""
+         | CREATE TABLE csv_table(
+         | shortField SHORT,
+         | intField INT,
+         | bigintField LONG,
+         | doubleField DOUBLE,
+         | stringField STRING,
+         | timestampField STRING,
+         | decimalField DECIMAL(18,2),
+         | dateField STRING,
+         | charField CHAR(5))
+         | ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
+       """.stripMargin)
+
+    sparkSession.sql(
+      s"""
+         | LOAD DATA LOCAL INPATH '$path'
+         | INTO TABLE csv_table
+       """.stripMargin)
+
+    sparkSession.sql("SELECT * FROM csv_table").show()
+
+    sparkTableExample(sparkSession)
+    hiveTableExample(sparkSession)
+
+    // Drop table
+    sparkSession.sql("DROP TABLE IF EXISTS csv_table")
+    sparkSession.stop()
+  }
 
+  def sparkTableExample(sparkSession: SparkSession): Unit = {
     // Create table
-    sparksession.sql("DROP TABLE IF EXISTS sparksession_table")
-    sparksession.sql(
+    sparkSession.sql("DROP TABLE IF EXISTS sparksession_table")
+    sparkSession.sql(
       s"""
          | CREATE TABLE sparksession_table(
          | shortField SHORT,
@@ -79,107 +85,95 @@ object SparkSessionExample {
          | charField CHAR(5)
          | )
          | USING carbondata
-         | OPTIONS('tableName'='sparksession_table')
        """.stripMargin)
 
-    val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
+    validateTable(sparkSession, "sparksession_table")
+
+    sparkSession.sql("DROP TABLE IF EXISTS sparksession_table")
+  }
 
-    sparksession.sql("DROP TABLE IF EXISTS csv_table")
-    sparksession.sql(
+  def hiveTableExample(sparkSession: SparkSession): Unit = {
+    // Create table
+    sparkSession.sql("DROP TABLE IF EXISTS sparksession_hive_table")
+    sparkSession.sql(
       s"""
-         | CREATE TABLE csv_table(
+         | CREATE TABLE sparksession_hive_table(
          | shortField SHORT,
          | intField INT,
          | bigintField LONG,
          | doubleField DOUBLE,
          | stringField STRING,
-         | timestampField STRING,
+         | timestampField TIMESTAMP,
          | decimalField DECIMAL(18,2),
-         | dateField STRING,
-         | charField CHAR(5))
-         | ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
-       """.stripMargin)
-
-    sparksession.sql(
-      s"""
-         | LOAD DATA LOCAL INPATH '$path'
-         | INTO TABLE csv_table
+         | dateField DATE,
+         | charField CHAR(5)
+         | )
+         | STORED AS carbondata
        """.stripMargin)
 
-    sparksession.sql("SELECT * FROM csv_table").show()
+    validateTable(sparkSession, "sparksession_hive_table")
+  }
 
-    sparksession.sql(
+  def validateTable(sparkSession: SparkSession, tableName: String): Unit = {
+    sparkSession.sql(
       s"""
-         | INSERT INTO TABLE sparksession_table
+         | INSERT INTO TABLE $tableName
          | SELECT shortField, intField, bigintField, doubleField, stringField,
          | from_unixtime(unix_timestamp(timestampField,'yyyy/MM/dd HH:mm:ss')) timestampField,
          | decimalField,from_unixtime(unix_timestamp(dateField,'yyyy/MM/dd')), charField
          | FROM csv_table
        """.stripMargin)
 
-    sparksession.sql("SELECT * FROM sparksession_table").show()
+    sparkSession.sql(s"SELECT * FROM $tableName").show()
 
-    sparksession.sql(
+    sparkSession.sql(
       s"""
          | SELECT *
-         | FROM sparksession_table
+         | FROM $tableName
          | WHERE stringfield = 'spark' AND decimalField > 40
       """.stripMargin).show()
 
     // Show results using the raw data's timestamp format
-    sparksession.sql(
+    sparkSession.sql(
       s"""
          | SELECT
          | stringField, date_format(timestampField, "yyyy/MM/dd HH:mm:ss") AS
          | timestampField
-         | FROM sparksession_table WHERE length(stringField) = 5
+         | FROM $tableName WHERE length(stringField) = 5
        """.stripMargin).show()
 
-    sparksession.sql(
+    sparkSession.sql(
       s"""
          | SELECT *
-         | FROM sparksession_table where date_format(dateField, "yyyy-MM-dd") = "2015-07-23"
+         | FROM $tableName where date_format(dateField, "yyyy-MM-dd") = "2015-07-23"
        """.stripMargin).show()
 
-    sparksession.sql("SELECT count(stringField) FROM sparksession_table").show()
+    sparkSession.sql(s"SELECT count(stringField) FROM $tableName").show()
 
-    sparksession.sql(
+    sparkSession.sql(
       s"""
          | SELECT sum(intField), stringField
-         | FROM sparksession_table
+         | FROM $tableName
          | GROUP BY stringField
        """.stripMargin).show()
 
-    sparksession.sql(
+    sparkSession.sql(
       s"""
          | SELECT t1.*, t2.*
-         | FROM sparksession_table t1, sparksession_table t2
+         | FROM $tableName t1, $tableName t2
          | WHERE t1.stringField = t2.stringField
       """.stripMargin).show()
 
-    sparksession.sql(
+    sparkSession.sql(
       s"""
          | WITH t1 AS (
-         | SELECT * FROM sparksession_table
+         | SELECT * FROM $tableName
          | UNION ALL
-         | SELECT * FROM sparksession_table
+         | SELECT * FROM $tableName
          | )
          | SELECT t1.*, t2.*
-         | FROM t1, sparksession_table t2
+         | FROM t1, $tableName t2
          | WHERE t1.stringField = t2.stringField
       """.stripMargin).show()
-
-    CarbonProperties.getInstance().addProperty(
-      CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
-      CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
-    CarbonProperties.getInstance().addProperty(
-      CarbonCommonConstants.CARBON_DATE_FORMAT,
-      CarbonCommonConstants.CARBON_DATE_DEFAULT_FORMAT)
-
-    // Drop table
-    sparksession.sql("DROP TABLE IF EXISTS sparksession_table")
-    sparksession.sql("DROP TABLE IF EXISTS csv_table")
-
-    sparksession.stop()
   }
 }
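
SparkSessionExample now exercises both create paths shown in this diff: the datasource form (USING carbondata, with the old OPTIONS('tableName'=...) clause removed) and the Hive form (STORED AS carbondata). A hedged side-by-side sketch; table names are illustrative:

import org.apache.spark.sql.SparkSession

// Sketch only; assumes spark was built with CarbonExtensions.
object CreateTableFormsSketch {
  def createBoth(spark: SparkSession): Unit = {
    // Datasource table, as in sparkTableExample above
    spark.sql("CREATE TABLE IF NOT EXISTS ds_table(id INT, name STRING) USING carbondata")
    // Hive-format table, as in hiveTableExample above
    spark.sql("CREATE TABLE IF NOT EXISTS hive_table(id INT, name STRING) STORED AS carbondata")
  }
}
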
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkStreamingExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkStreamingExample.scala
index d66fff4..29b05cd 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkStreamingExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/SparkStreamingExample.scala
@@ -50,7 +50,7 @@ object SparkStreamingExample {
       System.currentTimeMillis().toString()
     val streamTableName = s"dstream_stream_table"
 
-    val spark = ExampleUtils.createCarbonSession("SparkStreamingExample", 4)
+    val spark = ExampleUtils.createSparkSession("SparkStreamingExample", 4)
 
     val requireCreateTable = true
 
@@ -66,7 +66,7 @@ object SparkStreamingExample {
            | city STRING,
            | salary FLOAT
            | )
-           | STORED BY 'carbondata'
+           | STORED AS carbondata
            | TBLPROPERTIES(
            | 'streaming'='true',
            | 'sort_columns'='name')
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StandardPartitionExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/StandardPartitionExample.scala
index e04d934..c6c1e0a 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StandardPartitionExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/StandardPartitionExample.scala
@@ -32,7 +32,7 @@ import org.apache.carbondata.examples.util.ExampleUtils
 object StandardPartitionExample {
 
   def main(args: Array[String]) {
-    val spark = ExampleUtils.createCarbonSession("StandardPartitionExample")
+    val spark = ExampleUtils.createSparkSession("StandardPartitionExample")
     exampleBody(spark)
     spark.close()
   }
@@ -59,7 +59,7 @@ object StandardPartitionExample {
         | country String,
         | area String,
         | salary Int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
 
     spark.sql(
@@ -82,7 +82,7 @@ object StandardPartitionExample {
         | area String,
         | salary Int)
         | PARTITIONED BY (logdate Date)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | TBLPROPERTIES('SORT_COLUMNS'='id,vin')
       """.stripMargin)
 
@@ -151,7 +151,7 @@ object StandardPartitionExample {
         | city String,
         | population Int)
         | PARTITIONED BY (country String)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
 
     df.write.format("carbondata")
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamSQLExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamSQLExample.scala
index 857a7ae..97c45dd 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamSQLExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamSQLExample.scala
@@ -25,7 +25,7 @@ import org.apache.carbondata.examples.util.ExampleUtils
 object StreamSQLExample {
   def main(args: Array[String]) {
 
-    val spark = ExampleUtils.createCarbonSession("StructuredStreamingExample", 4)
+    val spark = ExampleUtils.createSparkSession("StructuredStreamingExample", 4)
     val requireCreateTable = true
     val recordFormat = "json" // can be "json" or "csv"
 
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingUsingBatchLoadExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingUsingBatchLoadExample.scala
index 3354a77..9d5346b 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingUsingBatchLoadExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingUsingBatchLoadExample.scala
@@ -46,7 +46,7 @@ object StreamingUsingBatchLoadExample {
       System.currentTimeMillis().toString()
     val streamTableName = s"dstream_batch_table"
 
-    val spark = ExampleUtils.createCarbonSession("StreamingUsingBatchLoadExample", 4)
+    val spark = ExampleUtils.createSparkSession("StreamingUsingBatchLoadExample", 4)
 
     val requireCreateTable = true
 
@@ -63,7 +63,7 @@ object StreamingUsingBatchLoadExample {
            | city STRING,
            | salary FLOAT
            | )
-           | STORED BY 'carbondata'
+           | STORED AS carbondata
            | TBLPROPERTIES(
            | 'sort_columns'='name',
            | 'AUTO_LOAD_MERGE'='true',
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingWithRowParserExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingWithRowParserExample.scala
index 07c5a8b..cd206b6 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingWithRowParserExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/StreamingWithRowParserExample.scala
@@ -37,7 +37,7 @@ object StreamingWithRowParserExample {
     val rootPath = new File(this.getClass.getResource("/").getPath
                             + "../../../..").getCanonicalPath
 
-    val spark = ExampleUtils.createCarbonSession("StreamingWithRowParserExample", 4)
+    val spark = ExampleUtils.createSparkSession("StreamingWithRowParserExample", 4)
     val streamTableName = s"stream_table_with_row_parser"
 
     val requireCreateTable = true
@@ -57,7 +57,7 @@ object StreamingWithRowParserExample {
              | salary FLOAT,
              | file struct<school:array<string>, age:int>
              | )
-             | STORED BY 'carbondata'
+             | STORED AS carbondata
              | TBLPROPERTIES(
              | 'streaming'='true', 'sort_columns'='name')
              | """.stripMargin)
@@ -70,7 +70,7 @@ object StreamingWithRowParserExample {
              | city STRING,
              | salary FLOAT
              | )
-             | STORED BY 'carbondata'
+             | STORED AS carbondata
              | TBLPROPERTIES(
              | 'streaming'='true', 'sort_columns'='name')
              | """.stripMargin)
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StructuredStreamingExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/StructuredStreamingExample.scala
index cecc767..0f7dc44 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/StructuredStreamingExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/StructuredStreamingExample.scala
@@ -36,7 +36,7 @@ object StructuredStreamingExample {
     val rootPath = new File(this.getClass.getResource("/").getPath
                             + "../../../..").getCanonicalPath
 
-    val spark = ExampleUtils.createCarbonSession("StructuredStreamingExample", 4)
+    val spark = ExampleUtils.createSparkSession("StructuredStreamingExample", 4)
     val streamTableName = s"stream_table"
 
     val requireCreateTable = true
@@ -55,7 +55,7 @@ object StructuredStreamingExample {
              | salary FLOAT,
              | file struct<school:array<string>, age:int>
              | )
-             | STORED BY 'carbondata'
+             | STORED AS carbondata
              | TBLPROPERTIES(
              | 'streaming'='true', 'sort_columns'='name')
              | """.stripMargin)
@@ -67,7 +67,7 @@ object StructuredStreamingExample {
              | name STRING,
              | salary FLOAT
              | )
-             | STORED BY 'carbondata'
+             | STORED AS carbondata
              | TBLPROPERTIES(
              | 'streaming'='true', 'sort_columns'='name')
              | """.stripMargin)
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/TableLevelCompactionOptionExample.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/TableLevelCompactionOptionExample.scala
index 17b299e..f2fbbfb 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/TableLevelCompactionOptionExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/TableLevelCompactionOptionExample.scala
@@ -29,7 +29,7 @@ import org.apache.carbondata.examples.util.ExampleUtils
 object TableLevelCompactionOptionExample {
 
   def main(args: Array[String]) {
-    val spark = ExampleUtils.createCarbonSession("TableLevelCompactionOptionExample")
+    val spark = ExampleUtils.createSparkSession("TableLevelCompactionOptionExample")
     exampleBody(spark)
     spark.close()
   }
@@ -54,7 +54,8 @@ object TableLevelCompactionOptionExample {
          | serialname String,
          | salary Int,
          | floatField float
-         | ) STORED BY 'carbondata'
+         | )
+         | STORED AS carbondata
          | TBLPROPERTIES (
          | 'MAJOR_COMPACTION_SIZE'='1024',
          | 'AUTO_LOAD_MERGE'='true',
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
index 483834d..070d709 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/util/ExampleUtils.scala
@@ -55,6 +55,7 @@ object ExampleUtils {
     } else {
       "local[" + workThreadNum.toString() + "]"
     }
+    import org.apache.spark.sql.CarbonSession._
 
     val spark = SparkSession
       .builder()
@@ -63,12 +64,38 @@ object ExampleUtils {
       .config("spark.sql.warehouse.dir", warehouse)
       .config("spark.driver.host", "localhost")
       .config("spark.sql.crossJoin.enabled", "true")
+      .enableHiveSupport()
+      .getOrCreateCarbonSession(storeLocation, metaStoreDB)
+    CarbonEnv.getInstance(spark)
+    spark.sparkContext.setLogLevel("ERROR")
+    spark
+  }
+
+  def createSparkSession(appName: String, workThreadNum: Int = 1): SparkSession = {
+    val rootPath = new File(this.getClass.getResource("/").getPath
+                            + "../../../..").getCanonicalPath
+    val warehouse = s"$rootPath/examples/spark2/target/warehouse"
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd HH:mm:ss")
+      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
+      .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE, "true")
+      .addProperty(CarbonCommonConstants.CARBON_BADRECORDS_LOC, "")
+    val masterUrl = if (workThreadNum <= 1) {
+      "local"
+    } else {
+      "local[" + workThreadNum.toString() + "]"
+    }
+    val spark = SparkSession
+      .builder()
+      .master(masterUrl)
+      .appName(appName)
+      .config("spark.sql.warehouse.dir", warehouse)
+      .config("spark.driver.host", "localhost")
+      .config("spark.sql.crossJoin.enabled", "true")
       .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
       .enableHiveSupport()
       .getOrCreate()
-
     CarbonEnv.getInstance(spark)
-
     spark.sparkContext.setLogLevel("ERROR")
     spark
   }
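
The net effect of the ExampleUtils change: examples no longer go through getOrCreateCarbonSession; a plain SparkSession builder with the spark.sql.extensions config is sufficient. A minimal standalone sketch of that pattern (the app name is a placeholder):

import org.apache.spark.sql.{CarbonEnv, SparkSession}

object CarbonExtensionsSessionSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .master("local")
      .appName("CarbonExtensionsSessionSketch")
      .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
      .enableHiveSupport()
      .getOrCreate()
    // Eagerly initialize the Carbon environment, as createSparkSession does above
    CarbonEnv.getInstance(spark)
    spark.sparkContext.setLogLevel("ERROR")
    spark.sql("SHOW TABLES").show()
    spark.close()
  }
}
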
diff --git a/examples/spark2/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala b/examples/spark2/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
index 921760e..95822db 100644
--- a/examples/spark2/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
+++ b/examples/spark2/src/test/scala/org/apache/carbondata/examplesCI/RunExamples.scala
@@ -122,7 +122,7 @@ class RunExamples extends QueryTest with BeforeAndAfterAll {
   }
 
   test("HiveExample") {
-    HiveExample.createCarbonTable(TestQueryExecutor.warehouse)
+    HiveExample.createCarbonTable(spark)
     HiveExample.readFromHive
   }
 
diff --git a/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/Spark2QueryTest.scala b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonFileHiveSerDe.java
similarity index 68%
copy from integration/spark2/src/test/scala/org/apache/spark/sql/common/util/Spark2QueryTest.scala
copy to integration/hive/src/main/java/org/apache/carbondata/hive/CarbonFileHiveSerDe.java
index 2edd7f8..2292e43 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/Spark2QueryTest.scala
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonFileHiveSerDe.java
@@ -15,20 +15,7 @@
  * limitations under the License.
  */
 
-package org.apache.spark.sql.common.util
+package org.apache.carbondata.hive;
 
-import org.apache.spark.sql.hive.{HiveExternalCatalog, HiveSessionCatalog}
-import org.apache.spark.sql.test.util.QueryTest
-
-
-class Spark2QueryTest extends QueryTest {
-
-  val hiveClient = sqlContext
-    .sparkSession
-    .sessionState
-    .catalog
-    .externalCatalog
-    .asInstanceOf[HiveExternalCatalog]
-    .client
-
-}
\ No newline at end of file
+public class CarbonFileHiveSerDe extends CarbonHiveSerDe {
+}
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveSerDe.java b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveSerDe.java
index a09d249..9f38571 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveSerDe.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveSerDe.java
@@ -24,7 +24,17 @@ import java.util.List;
 import java.util.Properties;
 import javax.annotation.Nullable;
 
+import org.apache.carbondata.common.logging.LogServiceFactory;
+import org.apache.carbondata.core.datastore.impl.FileFactory;
+import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
+import org.apache.carbondata.core.metadata.schema.SchemaReader;
+import org.apache.carbondata.core.metadata.schema.table.CarbonTable;
+import org.apache.carbondata.core.metadata.schema.table.TableInfo;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
+import org.apache.carbondata.core.util.path.CarbonTablePath;
+
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hive.metastore.api.hive_metastoreConstants;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
@@ -53,6 +63,7 @@ import org.apache.hadoop.io.ArrayWritable;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
+import org.apache.log4j.Logger;
 
 /**
  * A serde class for Carbondata.
@@ -60,6 +71,10 @@ import org.apache.hadoop.io.Writable;
  */
 @SerDeSpec(schemaProps = { serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES })
 public class CarbonHiveSerDe extends AbstractSerDe {
+
+  private static final Logger LOGGER =
+      LogServiceFactory.getLogService(CarbonHiveSerDe.class.getCanonicalName());
+
   private final SerDeStats stats;
   private ObjectInspector objInspector;
 
@@ -99,7 +114,7 @@ public class CarbonHiveSerDe extends AbstractSerDe {
       columnTypes = TypeInfoUtils.getTypeInfosFromTypeString(columnTypeProperty);
     }
 
-
+    inferSchema(tbl, columnNames, columnTypes);
 
     // Create row related objects
     rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
@@ -111,6 +126,40 @@ public class CarbonHiveSerDe extends AbstractSerDe {
     status = LAST_OPERATION.UNKNOWN;
   }
 
+  private void inferSchema(Properties tbl, List<String> columnNames, List<TypeInfo> columnTypes) {
+    if (columnNames.size() == 0 && columnTypes.size() == 0) {
+      String external = tbl.getProperty("EXTERNAL");
+      String location = tbl.getProperty(hive_metastoreConstants.META_TABLE_LOCATION);
+      if (external != null && "TRUE".equals(external) && location != null) {
+        String[] names =
+            tbl.getProperty(hive_metastoreConstants.META_TABLE_NAME).split("\\.");
+        if (names.length == 2) {
+          AbsoluteTableIdentifier identifier =
+              AbsoluteTableIdentifier.from(location, names[0], names[1]);
+          String schemaPath = CarbonTablePath.getSchemaFilePath(identifier.getTablePath());
+          try {
+            TableInfo tableInfo = null;
+            if (!FileFactory.isFileExist(schemaPath)) {
+              tableInfo = SchemaReader.inferSchema(identifier, false);
+            } else {
+              tableInfo = SchemaReader.getTableInfo(identifier);
+            }
+            if (tableInfo != null) {
+              CarbonTable carbonTable = CarbonTable.buildFromTableInfo(tableInfo);
+              List<CarbonColumn> columns = carbonTable.getCreateOrderColumn();
+              for (CarbonColumn column : columns) {
+                columnNames.add(column.getColName());
+                columnTypes.add(HiveDataTypeUtils.convertCarbonDataTypeToHive(column));
+              }
+            }
+          } catch (Exception ex) {
+            LOGGER.warn("Failed to infer schema: " + ex.getMessage());
+          }
+        }
+      }
+    }
+  }
+
   @Override
   public Class<? extends Writable> getSerializedClass() {
     return ArrayWritable.class;
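
The new inferSchema hook above fills in Hive column names and types when the table properties carry an empty column list for an external table, reading the Carbon schema file under the table location or inferring one from the data files. The Spark-side analogue of a schema-less external table appears in ExternalTableExample earlier in this diff; a hedged sketch, where the location path is hypothetical:

import org.apache.spark.sql.SparkSession

// Sketch only: a schema-less external table over an existing Carbon store.
// '/tmp/carbon/origin_table' is a hypothetical path; the columns are derived
// from the schema (or data files) found at LOCATION.
object ExternalTableSketch {
  def create(spark: SparkSession): Unit = {
    spark.sql("DROP TABLE IF EXISTS ext_table")
    spark.sql(
      "CREATE EXTERNAL TABLE ext_table STORED AS carbondata " +
      "LOCATION '/tmp/carbon/origin_table'")
    spark.sql("SELECT count(*) FROM ext_table").show()
  }
}
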
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java
index 160a5ac..f3cbf44 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java
@@ -21,12 +21,14 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Objects;
 
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.SettableStructObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.StructField;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveVarcharObjectInspector;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
@@ -62,10 +64,12 @@ class CarbonObjectInspector extends SettableStructObjectInspector {
   private ObjectInspector getObjectInspector(final TypeInfo typeInfo) {
     if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
       return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
+    } else if (typeInfo instanceof CharTypeInfo) {
+      return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
     } else if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
       return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
     } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
       return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
     } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
       return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
     } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
@@ -181,8 +187,7 @@ class CarbonObjectInspector extends SettableStructObjectInspector {
       return false;
     }
     final CarbonObjectInspector other = (CarbonObjectInspector) obj;
-    return !(this.typeInfo != other.typeInfo && (this.typeInfo == null || !this.typeInfo
-        .equals(other.typeInfo)));
+    return Objects.equals(this.typeInfo, other.typeInfo);
   }
 
   @Override
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/HiveDataTypeUtils.java b/integration/hive/src/main/java/org/apache/carbondata/hive/HiveDataTypeUtils.java
new file mode 100644
index 0000000..33507b4
--- /dev/null
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/HiveDataTypeUtils.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.hive;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.carbondata.core.metadata.datatype.DataTypes;
+import org.apache.carbondata.core.metadata.datatype.DecimalType;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
+import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
+
+import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+
+public class HiveDataTypeUtils {
+
+  public static TypeInfo convertCarbonDataTypeToHive(CarbonColumn column) {
+    int id = column.getDataType().getId();
+    if (id == DataTypes.STRING.getId()) {
+      return TypeInfoFactory.stringTypeInfo;
+    } else if (id == DataTypes.DATE.getId()) {
+      return TypeInfoFactory.dateTypeInfo;
+    } else if (id == DataTypes.TIMESTAMP.getId()) {
+      return TypeInfoFactory.timestampTypeInfo;
+    } else if (id == DataTypes.BOOLEAN.getId()) {
+      return TypeInfoFactory.booleanTypeInfo;
+    } else if (id == DataTypes.BYTE.getId()) {
+      return TypeInfoFactory.byteTypeInfo;
+    } else if (id == DataTypes.SHORT.getId()) {
+      return TypeInfoFactory.shortTypeInfo;
+    } else if (id == DataTypes.INT.getId()) {
+      return TypeInfoFactory.intTypeInfo;
+    } else if (id == DataTypes.LONG.getId()) {
+      return TypeInfoFactory.longTypeInfo;
+    } else if (id == DataTypes.FLOAT.getId()) {
+      return TypeInfoFactory.floatTypeInfo;
+    } else if (id == DataTypes.DOUBLE.getId()) {
+      return TypeInfoFactory.doubleTypeInfo;
+    } else if (id == DataTypes.DECIMAL_TYPE_ID) {
+      DecimalType type = (DecimalType) column.getDataType();
+      return new DecimalTypeInfo(type.getPrecision(), type.getScale());
+    } else if (id == DataTypes.BINARY.getId()) {
+      return TypeInfoFactory.binaryTypeInfo;
+    } else if (id == DataTypes.ARRAY_TYPE_ID) {
+      ListTypeInfo typeInfo = new ListTypeInfo();
+      if (!(column instanceof CarbonDimension)) {
+        throw new RuntimeException(
+            "Failed to get child columns of column: " + column.getColName());
+      }
+      typeInfo.setListElementTypeInfo(
+          convertCarbonDataTypeToHive(
+              ((CarbonDimension) column).getListOfChildDimensions().get(0)
+          )
+      );
+      return typeInfo;
+    } else if (id == DataTypes.STRUCT_TYPE_ID) {
+      StructTypeInfo typeInfo = new StructTypeInfo();
+      if (!(column instanceof CarbonDimension)) {
+        throw new RuntimeException(
+            "Failed to get child columns of column: " + column.getColName());
+      }
+      List<CarbonDimension> listOfChildDimensions =
+          ((CarbonDimension) column).getListOfChildDimensions();
+      ArrayList<String> allStructFieldNames = new ArrayList<>(listOfChildDimensions.size());
+      ArrayList<TypeInfo> allStructFieldTypeInfos = new ArrayList<>(listOfChildDimensions.size());
+      typeInfo.setAllStructFieldNames(allStructFieldNames);
+      typeInfo.setAllStructFieldTypeInfos(allStructFieldTypeInfos);
+      for (CarbonDimension dimension : listOfChildDimensions) {
+        String[] columnsNames = dimension.getColName().split("\\.");
+        allStructFieldNames.add(columnsNames[columnsNames.length - 1]);
+        allStructFieldTypeInfos.add(convertCarbonDataTypeToHive(dimension));
+      }
+      return typeInfo;
+    } else if (id == DataTypes.MAP_TYPE_ID) {
+      MapTypeInfo typeInfo = new MapTypeInfo();
+      List<CarbonDimension> listOfChildDimensions = ((CarbonDimension) column)
+          .getListOfChildDimensions()
+          .get(0)
+          .getListOfChildDimensions();
+      typeInfo.setMapKeyTypeInfo(convertCarbonDataTypeToHive(listOfChildDimensions.get(0)));
+      typeInfo.setMapValueTypeInfo(convertCarbonDataTypeToHive(listOfChildDimensions.get(1)));
+      return typeInfo;
+    } else if (id == DataTypes.VARCHAR.getId()) {
+      return TypeInfoFactory.varcharTypeInfo;
+    } else {
+      throw new RuntimeException("convert DataType with invalid id: " + id);
+    }
+  }
+}
diff --git a/integration/spark-carbon-common-test/pom.xml b/integration/spark-carbon-common-test/pom.xml
deleted file mode 100644
index ccf86ea..0000000
--- a/integration/spark-carbon-common-test/pom.xml
+++ /dev/null
@@ -1,438 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<!--
-    Licensed to the Apache Software Foundation (ASF) under one or more
-    contributor license agreements.  See the NOTICE file distributed with
-    this work for additional information regarding copyright ownership.
-    The ASF licenses this file to You under the Apache License, Version 2.0
-    (the "License"); you may not use this file except in compliance with
-    the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing, software
-    distributed under the License is distributed on an "AS IS" BASIS,
-    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-    See the License for the specific language governing permissions and
-    limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-
-  <modelVersion>4.0.0</modelVersion>
-
-  <parent>
-    <groupId>org.apache.carbondata</groupId>
-    <artifactId>carbondata-parent</artifactId>
-    <version>2.0.0-SNAPSHOT</version>
-    <relativePath>../../pom.xml</relativePath>
-  </parent>
-
-  <artifactId>carbondata-spark-carbon-common-test</artifactId>
-  <name>Apache CarbonData :: Spark Carbon Common Test</name>
-
-  <properties>
-    <dev.path>${basedir}/../../dev</dev.path>
-    <jacoco.append>true</jacoco.append>
-    <build.directory.projectCommon>../../common/target</build.directory.projectCommon>
-    <build.directory.projectCore>../../core/target</build.directory.projectCore>
-    <build.directory.projectProcessing>../../processing/target</build.directory.projectProcessing>
-    <build.directory.projectHadoop>../../hadoop/target</build.directory.projectHadoop>
-    <build.directory.projectFormat>../../format/target</build.directory.projectFormat>
-    <build.directory.projectSpark>../../integration/spark/target</build.directory.projectSpark>
-    <build.directory.projectSpark2>../../integration/spark2/target</build.directory.projectSpark2>
-    <build.directory.projectSparkCommon>../../integration/spark-common/target</build.directory.projectSparkCommon>
-    <build.directory.projectSparkCommonTest>../../integration/spark-common-test/target</build.directory.projectSparkCommonTest>
-    <!--<build.directory.projectHive>../../integration/hive/target</build.directory.projectHive>-->
-    <!--<build.directory.projectPresto>../../integration/presto/target</build.directory.projectPresto>-->
-    <build.directory.projectStoreSdk>../../store/sdk/target</build.directory.projectStoreSdk>
-    <build.directory.projectStreaming>../../streaming/target</build.directory.projectStreaming>
-    <build.directory.projectBloom>../../datamap/bloom/target</build.directory.projectBloom>
-    <build.directory.projectLucene>../../datamap/lucene/target</build.directory.projectLucene>
-
-    <classes.directory.projectCommon>../../common/target/classes</classes.directory.projectCommon>
-    <classes.directory.projectCore>../../core/target/classes</classes.directory.projectCore>
-    <classes.directory.projectProcessing>../../processing/target/classes</classes.directory.projectProcessing>
-    <classes.directory.projectHadoop>../../hadoop/target/classes</classes.directory.projectHadoop>
-    <classes.directory.projectFormat>../../format/target/classes</classes.directory.projectFormat>
-    <classes.directory.projectSpark>../../integration/spark/target/classes</classes.directory.projectSpark>
-    <classes.directory.projectSpark2>../../integration/spark2/target/classes</classes.directory.projectSpark2>
-    <classes.directory.projectSparkCommon>../../integration/spark-common/target/classes</classes.directory.projectSparkCommon>
-    <classes.directory.projectSparkCommonTest>../../integration/spark-common-test/target/classes</classes.directory.projectSparkCommonTest>
-    <!--<classes.directory.projectHive>../../integration/hive/target/classes</classes.directory.projectHive>-->
-    <!--<classes.directory.projectPresto>../../integration/presto/target/classes</classes.directory.projectPresto>-->
-    <classes.directory.projectStoreSdk>../../store/sdk/target/classes</classes.directory.projectStoreSdk>
-    <classes.directory.projectStreaming>../../streaming/target/classes</classes.directory.projectStreaming>
-    <classes.directory.projectBloom>../../datamap/bloom/target/classes</classes.directory.projectBloom>
-    <classes.directory.projectLucene>../../datamap/lucene/target/classes</classes.directory.projectLucene>
-
-    <sources.directory.projectCommon>../../common/src/main/java</sources.directory.projectCommon>
-    <sources.directory.projectCore>../../core/src/main/java</sources.directory.projectCore>
-    <sources.directory.projectProcessing>../../processing/src/main/java</sources.directory.projectProcessing>
-    <sources.directory.projectHadoop>../../hadoop/src/main/java</sources.directory.projectHadoop>
-    <sources.directory.projectFormat>../../format/src/main/thrift</sources.directory.projectFormat>
-    <sources.directory.projectSpark>../../integration/spark/src/main/scala</sources.directory.projectSpark>
-    <sources.directory.projectSpark>../../integration/spark/src/main/java</sources.directory.projectSpark>
-    <sources.directory.projectSpark2>../../integration/spark2/src/main/java</sources.directory.projectSpark2>
-    <sources.directory.projectSpark2>../../integration/spark2/src/main/scala</sources.directory.projectSpark2>
-    <sources.directory.projectSparkCommon>../../integration/spark-common/src/main/java</sources.directory.projectSparkCommon>
-    <sources.directory.projectSparkCommon>../../integration/spark-common/src/main/scala</sources.directory.projectSparkCommon>
-    <!--<sources.directory.projectHive>../../integration/hive/src/main/java</sources.directory.projectHive>-->
-    <!--<sources.directory.projectHive>../../integration/hive/src/main/scala</sources.directory.projectHive>-->
-    <!--<sources.directory.projectPresto>../../integration/presto/src/main/java</sources.directory.projectPresto>-->
-    <!--<sources.directory.projectPresto>../../integration/presto/src/main/scala</sources.directory.projectPresto>-->
-    <sources.directory.projectStoreSdk>../../store/sdk/src/main/java</sources.directory.projectStoreSdk>
-    <sources.directory.projectStreaming>../../streaming/src/main/java</sources.directory.projectStreaming>
-    <sources.directory.projectStreaming>../../streaming/src/main/scala</sources.directory.projectStreaming>
-    <sources.directory.projectBloom>../../datamap/bloom/src/main/java</sources.directory.projectBloom>
-    <sources.directory.projectLucene>../../datamap/lucene/src/main/java</sources.directory.projectLucene>
-
-    <generated-sources.directory.projectCommon>../../common/target/generated-sources/annotations</generated-sources.directory.projectCommon>
-    <generated-sources.directory.projectCore>../../core/target/generated-sources/annotations</generated-sources.directory.projectCore>
-    <generated-sources.directory.projectProcessing>../../processing/target/generated-sources/annotations</generated-sources.directory.projectProcessing>
-    <generated-sources.directory.projectHadoop>../../hadoop/target/generated-sources/annotations</generated-sources.directory.projectHadoop>
-    <generated-sources.directory.projectFormat>../../format/target/generated-sources/annotations</generated-sources.directory.projectFormat>
-    <generated-sources.directory.projectSpark>../../integration/spark/target/generated-sources/annotations</generated-sources.directory.projectSpark>
-    <generated-sources.directory.projectSpark2>../../integration/spark2/target/generated-sources/annotations</generated-sources.directory.projectSpark2>
-    <generated-sources.directory.projectSparkCommon>../../integration/spark-common/target/generated-sources/annotations</generated-sources.directory.projectSparkCommon>
-    <generated-sources.directory.projectSparkCommonTest>../../integration/spark-common-test/target/generated-sources/annotations</generated-sources.directory.projectSparkCommonTest>
-    <!--<generated-sources.directory.projectHive>../../integration/hive/target/generated-sources/annotations</generated-sources.directory.projectHive>-->
-    <!--<generated-sources.directory.projectPresto>../../integration/presto/target/generated-sources/annotations</generated-sources.directory.projectPresto>-->
-    <generated-sources.directory.projectStoreSdk>../../store/sdk/target/generated-sources/annotations</generated-sources.directory.projectStoreSdk>
-    <generated-sources.directory.projectStreaming>../../streaming/target/generated-sources/annotations</generated-sources.directory.projectStreaming>
-    <generated-sources.directory.projectBloom>../../datamap/bloom/target/generated-sources/annotations</generated-sources.directory.projectBloom>
-    <generated-sources.directory.projectLucene>../../datamap/lucene/target/generated-sources/annotations</generated-sources.directory.projectLucene>
-
-  </properties>
-
-
-  <dependencies>
-    <dependency>
-      <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark2</artifactId>
-      <version>${project.version}</version>
-      <exclusions>
-        <exclusion>
-          <groupId>org.apache.hive</groupId>
-          <artifactId>hive-exec</artifactId>
-        </exclusion>
-      </exclusions>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-spark-common-test</artifactId>
-      <version>${project.version}</version>
-      <type>test-jar</type>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-lucene</artifactId>
-      <version>${project.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-bloom</artifactId>
-      <version>${project.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.apache.carbondata</groupId>
-      <artifactId>carbondata-store-sdk</artifactId>
-      <version>${project.version}</version>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <!-- spark-catalyst has a runtime dependency on spark-core, so
-      spark-core must be present while executing the test cases;
-      otherwise they will fail to execute -->
-      <groupId>org.apache.spark</groupId>
-      <artifactId>spark-core_${scala.binary.version}</artifactId>
-      <scope>test</scope>
-      <exclusions>
-        <!-- need to exclude the Avro jar from this project; spark-core uses
-        version 1.7.4, which is not compatible with Carbon -->
-        <exclusion>
-          <groupId>org.apache.avro</groupId>
-          <artifactId>avro</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
-      <groupId>junit</groupId>
-      <artifactId>junit</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.scalatest</groupId>
-      <artifactId>scalatest_${scala.binary.version}</artifactId>
-      <scope>test</scope>
-    </dependency>
-    <dependency>
-      <groupId>org.jmockit</groupId>
-      <artifactId>jmockit</artifactId>
-      <scope>test</scope>
-    </dependency>
-  </dependencies>
-
-  <build>
-    <resources>
-      <resource>
-        <directory>src/resources</directory>
-      </resource>
-      <resource>
-        <directory>.</directory>
-        <includes>
-          <include>CARBON_SPARK_INTERFACELogResource.properties</include>
-        </includes>
-      </resource>
-    </resources>
-    <plugins>
-      <plugin>
-        <groupId>org.scala-tools</groupId>
-        <artifactId>maven-scala-plugin</artifactId>
-        <version>2.15.2</version>
-        <executions>
-          <execution>
-            <id>compile</id>
-            <goals>
-              <goal>compile</goal>
-            </goals>
-            <phase>compile</phase>
-          </execution>
-          <execution>
-            <id>testCompile</id>
-            <goals>
-              <goal>testCompile</goal>
-            </goals>
-            <phase>test</phase>
-          </execution>
-          <execution>
-            <phase>process-resources</phase>
-            <goals>
-              <goal>compile</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <configuration>
-          <source>1.7</source>
-          <target>1.7</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-surefire-plugin</artifactId>
-        <version>2.18</version>
-        <!-- Note config is repeated in scalatest config -->
-        <configuration>
-          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <argLine>-Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m</argLine>
-          <systemProperties>
-            <java.awt.headless>true</java.awt.headless>
-            <spark.carbon.hive.schema.store>${carbon.hive.based.metastore}</spark.carbon.hive.schema.store>
-          </systemProperties>
-          <failIfNoTests>false</failIfNoTests>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.scalatest</groupId>
-        <artifactId>scalatest-maven-plugin</artifactId>
-        <version>1.0</version>
-        <!-- Note config is repeated in surefire config -->
-        <configuration>
-          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
-          <junitxml>.</junitxml>
-          <filereports>CarbonTestSuite.txt</filereports>
-          <argLine> ${argLine} -ea -Xmx3g -XX:MaxPermSize=512m -XX:ReservedCodeCacheSize=512m
-          </argLine>
-          <stderr />
-          <environmentVariables>
-          </environmentVariables>
-          <systemProperties>
-            <java.awt.headless>true</java.awt.headless>
-            <spark.carbon.hive.schema.store>${carbon.hive.based.metastore}</spark.carbon.hive.schema.store>
-          </systemProperties>
-        </configuration>
-        <executions>
-          <execution>
-            <id>test</id>
-            <goals>
-              <goal>test</goal>
-            </goals>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-dependency-plugin</artifactId>
-        <executions>
-          <!-- Copy the ant tasks jar. Needed for ts.jacoco.report-ant. -->
-          <execution>
-            <id>jacoco-dependency-ant</id>
-            <goals>
-              <goal>copy</goal>
-            </goals>
-            <phase>process-test-resources</phase>
-            <inherited>false</inherited>
-            <configuration>
-              <artifactItems>
-                <artifactItem>
-                  <groupId>org.jacoco</groupId>
-                  <artifactId>org.jacoco.ant</artifactId>
-                  <version>0.7.9</version>
-                </artifactItem>
-              </artifactItems>
-              <stripVersion>true</stripVersion>
-              <outputDirectory>${basedir}/target/jacoco-jars</outputDirectory>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-antrun-plugin</artifactId>
-        <version>1.6</version>
-        <executions>
-          <execution>
-            <phase>post-integration-test</phase>
-            <goals>
-              <goal>run</goal>
-            </goals>
-            <configuration>
-              <target>
-                <echo message="Generating JaCoCo Reports" />
-                <taskdef name="report" classname="org.jacoco.ant.ReportTask">
-                  <classpath path="${basedir}/target/jacoco-jars/org.jacoco.ant.jar" />
-                </taskdef>
-                <mkdir dir="${basedir}/target/coverage-report" />
-                <report>
-                  <executiondata>
-                    <fileset dir="${build.directory.projectCommon}">
-                      <include name="jacoco.exec" />
-                    </fileset>
-                    <fileset dir="${build.directory.projectCore}">
-                      <include name="jacoco.exec" />
-                    </fileset>
-                    <fileset dir="${build.directory.projectProcessing}">
-                      <include name="jacoco.exec" />
-                    </fileset>
-                    <fileset dir="${build.directory.projectHadoop}">
-                      <include name="jacoco.exec" />
-                    </fileset>
-                    <fileset dir="${build.directory.projectFormat}" erroronmissingdir="false">
-                      <include name="jacoco.exec" />
-                    </fileset>
-                    <fileset dir="${build.directory.projectSpark}" erroronmissingdir="false">
-                      <include name="jacoco.exec" />
-                    </fileset>
-                    <fileset dir="${build.directory.projectSpark2}" erroronmissingdir="false">
-                      <include name="jacoco.exec" />
-                    </fileset>
-                    <fileset dir="${build.directory.projectSparkCommon}">
-                      <include name="jacoco.exec" />
-                    </fileset>
-                    <fileset dir="${build.directory.projectSparkCommonTest}">
-                      <include name="jacoco.exec" />
-                    </fileset>
-                    <!--<fileset dir="${build.directory.projectHive}" erroronmissingdir="false">
-                      <include name="jacoco.exec" />
-                    </fileset>-->
-                    <!--<fileset dir="${build.directory.projectPresto}" erroronmissingdir="false">
-                      <include name="jacoco.exec" />
-                    </fileset>-->
-                    <fileset dir="${build.directory.projectStoreSdk}" erroronmissingdir="false">
-                      <include name="jacoco.exec" />
-                    </fileset>
-                    <fileset dir="${build.directory.projectStreaming}" erroronmissingdir="false">
-                      <include name="jacoco.exec" />
-                    </fileset>
-                    <fileset dir="${build.directory.projectBloom}" erroronmissingdir="false">
-                      <include name="jacoco.exec" />
-                    </fileset>
-                    <fileset dir="${build.directory.projectLucene}" erroronmissingdir="false">
-                      <include name="jacoco.exec" />
-                    </fileset>
-
-                  </executiondata>
-                  <structure name="jacoco-CarbonData Coverage Project">
-                    <group name="carbondata-coverage">
-                      <classfiles>
-                        <fileset dir="${classes.directory.projectCommon}" />
-                        <fileset dir="${classes.directory.projectCore}" />
-                        <fileset dir="${classes.directory.projectProcessing}" />
-                        <fileset dir="${classes.directory.projectHadoop}" />
-                        <!--<fileset dir="${classes.directory.projectFormat}" erroronmissingdir="false"/>-->
-                        <fileset dir="${classes.directory.projectSpark}" erroronmissingdir="false"/>
-                        <fileset dir="${classes.directory.projectSpark2}" erroronmissingdir="false"/>
-                        <fileset dir="${classes.directory.projectSparkCommon}" />
-                        <fileset dir="${classes.directory.projectSparkCommonTest}" />
-                        <!--<fileset dir="${classes.directory.projectHive}" erroronmissingdir="false" />-->
-                        <!--<fileset dir="${classes.directory.projectPresto}" erroronmissingdir="false" />-->
-                        <fileset dir="${classes.directory.projectStoreSdk}" erroronmissingdir="false" />
-                        <fileset dir="${classes.directory.projectStreaming}" erroronmissingdir="false" />
-                        <fileset dir="${classes.directory.projectBloom}" erroronmissingdir="false" />
-                        <fileset dir="${classes.directory.projectLucene}" erroronmissingdir="false" />
-                      </classfiles>
-                      <sourcefiles encoding="UTF-8">
-                        <fileset dir="${sources.directory.projectCommon}" />
-                        <fileset dir="${sources.directory.projectCore}" />
-                        <fileset dir="${sources.directory.projectProcessing}" />
-                        <fileset dir="${sources.directory.projectHadoop}" />
-                        <!--<fileset dir="${sources.directory.projectFormat}" erroronmissingdir="false"/>-->
-                        <fileset dir="${sources.directory.projectSpark}" erroronmissingdir="false"/>
-                        <fileset dir="${sources.directory.projectSpark2}" erroronmissingdir="false"/>
-                        <fileset dir="${sources.directory.projectSparkCommon}" />
-                        <!--<fileset dir="${sources.directory.projectHive}" erroronmissingdir="false" />-->
-                        <!--<fileset dir="${sources.directory.projectPresto}" erroronmissingdir="false" />-->
-                        <fileset dir="${sources.directory.projectStoreSdk}" erroronmissingdir="false" />
-                        <fileset dir="${sources.directory.projectStreaming}" erroronmissingdir="false" />
-                        <fileset dir="${sources.directory.projectBloom}" erroronmissingdir="false" />
-                        <fileset dir="${sources.directory.projectLucene}" erroronmissingdir="false" />
-
-                      </sourcefiles>
-                    </group>
-                  </structure>
-                  <html destdir="../../target/carbondata-coverage-report/html" />
-                  <xml destfile="../../target/carbondata-coverage-report/carbondata-coverage-report.xml" />
-                  <csv destfile="../../target/carbondata-coverage-report/carbondata-coverage-report.csv" />
-                </report>
-              </target>
-            </configuration>
-          </execution>
-        </executions>
-        <dependencies>
-          <dependency>
-            <groupId>org.jacoco</groupId>
-            <artifactId>org.jacoco.ant</artifactId>
-            <version>0.7.9</version>
-          </dependency>
-        </dependencies>
-      </plugin>
-    </plugins>
-  </build>
-  <profiles>
-    <profile>
-      <id>sdvtest</id>
-      <properties>
-        <maven.test.skip>true</maven.test.skip>
-      </properties>
-    </profile>
-    <profile>
-      <id>build-all</id>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
-            <configuration>
-              <skip>true</skip>
-            </configuration>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
-  </profiles>
-</project>
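
[Editor's note] With the spark-carbon-common-test module deleted, its suites move back to spark-common-test and run on a plain SparkSession with the Carbon extension enabled instead of CarbonSession. A minimal sketch of such a session setup; the extension class name org.apache.spark.sql.CarbonExtensions is the one shipped by CarbonData 2.x, while the master, app name, and table below are illustrative assumptions.

    import org.apache.spark.sql.SparkSession

    // Build a SparkSession with the Carbon Spark extension installed; once it
    // is active, "STORED AS carbondata" and "USING carbondata" DDL are parsed.
    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("carbon-extension-test")
      .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
      .enableHiveSupport()
      .getOrCreate()

    spark.sql("CREATE TABLE demo (name STRING, id INT) STORED AS carbondata")
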
diff --git a/integration/spark-carbon-common-test/src/test/scala/org/apache/carbondata/spark/util/CarbonSparkQueryTest.scala b/integration/spark-carbon-common-test/src/test/scala/org/apache/carbondata/spark/util/CarbonSparkQueryTest.scala
deleted file mode 100644
index 4128d6c..0000000
--- a/integration/spark-carbon-common-test/src/test/scala/org/apache/carbondata/spark/util/CarbonSparkQueryTest.scala
+++ /dev/null
@@ -1,50 +0,0 @@
-package org.apache.carbondata.spark.util
-
-import org.apache.spark.sql.{CarbonDatasourceHadoopRelation, DataFrame}
-import org.apache.spark.sql.execution.datasources.LogicalRelation
-import org.apache.spark.sql.hive.CarbonRelation
-import org.apache.spark.sql.test.util.QueryTest
-
-class CarbonSparkQueryTest extends QueryTest {
-
-  /**
-   * check whether the given pre-aggregate tables appear in the DataFrame's query plan
-   *
-   * @param df DataFrame
-   * @param exists whether the tables in preAggTableNames are expected to be present
-   * @param preAggTableNames names of the pre-aggregate tables to look for
-   */
-  def checkPreAggTable(df: DataFrame, exists: Boolean, preAggTableNames: String*): Unit = {
-    val plan = df.queryExecution.analyzed
-    for (preAggTableName <- preAggTableNames) {
-      var isValidPlan = false
-      plan.transform {
-        // first check whether any pre-aggregate relation is present in the plan;
-        // if the call comes from the create pre-aggregate table flow, there is no need to transform the query plan
-        case ca: CarbonRelation =>
-          if (ca.isInstanceOf[CarbonDatasourceHadoopRelation]) {
-            val relation = ca.asInstanceOf[CarbonDatasourceHadoopRelation]
-            if (relation.carbonTable.getTableName.equalsIgnoreCase(preAggTableName)) {
-              isValidPlan = true
-            }
-          }
-          ca
-        case logicalRelation: LogicalRelation =>
-          if (logicalRelation.relation.isInstanceOf[CarbonDatasourceHadoopRelation]) {
-            val relation = logicalRelation.relation.asInstanceOf[CarbonDatasourceHadoopRelation]
-            if (relation.carbonTable.getTableName.equalsIgnoreCase(preAggTableName)) {
-              isValidPlan = true
-            }
-          }
-          logicalRelation
-      }
-
-      assert(exists == isValidPlan)
-    }
-  }
-
-}
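
[Editor's note] The removed helper above asserted whether a query plan resolved to a given pre-aggregate table. A hedged usage sketch, inside a suite extending this class; the table and aggregate-table names are made up for illustration.

    // Hypothetical usage of the removed checkPreAggTable helper: after creating
    // a pre-aggregate datamap, verify an aggregate query is rewritten onto it.
    val df = sql("SELECT name, sum(id) FROM maintable GROUP BY name")
    checkPreAggTable(df, exists = true, "maintable_agg0")
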
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala
index b0885d1..10e7b02 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/AlterTableTestCase.scala
@@ -40,7 +40,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
 
  //Check alter table rename using the alter command in lower case
   test("RenameTable_001_01", Include) {
-     sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""alter table test1 rename to test2""").collect
     checkAnswer(s"""select count(*) from test2""",
@@ -51,7 +51,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
 
  //Check alter table rename using the alter command in mixed upper &amp; lower case
   test("RenameTable_001_02", Include) {
-     sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""alter table Test1 RENAME to teSt2""").collect
    sql(s"""insert into test2 select 'yy',2""").collect
@@ -63,7 +63,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
 
  //Check alter table rename using the alter command in upper case
   test("RenameTable_001_03", Include) {
-     sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""alter table test1 RENAME TO test2""").collect
     checkAnswer(s"""select count(*) from test2""",
@@ -74,7 +74,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
 
  //Check alter table where the target table is specified with database name
   test("RenameTable_001_04", Include) {
-     sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""alter table test1 RENAME TO defAult.test2""").collect
     checkAnswer(s"""select count(*) from test2""",
@@ -88,7 +88,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql(s"""drop table if exists test2""").collect
     sql(s"""drop table if exists test1""").collect
     sql(s"""drop table if exists test3""").collect
-     sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""alter table test1 rename to test2""").collect
    sql(s"""alter table test2 rename to test3""").collect
@@ -103,7 +103,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   test("RenameTable_001_07_1", Include) {
     sql(s"""drop table if exists test2""").collect
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""alter table test1 RENAME TO test2""").collect
    sql(s"""Insert into test2 select 'yy',2""").collect
@@ -125,9 +125,9 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check alter table when the altered name is already present in the database
   test("RenameTable_001_08", Include) {
     intercept[Exception] {
-      sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
       sql(s"""insert into test1 select 'xx',1""").collect
-      sql(s"""create table test2 (name string, id int) stored by 'carbondata'""").collect
+      sql(s"""create table test2 (name string, id int) STORED AS carbondata""").collect
       sql(s"""alter table test1 RENAME TO test2""").collect
     }
 
@@ -139,7 +139,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check alter table when the altered name is given multiple times
   test("RenameTable_001_09", Include) {
     intercept[Exception] {
-      sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
       sql(s"""insert into test1 select 'xx',1""").collect
       sql(s"""alter table test1 RENAME TO test2 test3""").collect
     }
@@ -150,7 +150,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check delete column for dimension column
   test("DeleteCol_001_01", Include) {
     intercept[Exception] {
-      sql(s"""create table test1 (name string, id int) stored by 'carbondata'  """).collect
+      sql(s"""create table test1 (name string, id int) STORED AS carbondata  """).collect
       sql(s"""insert into test1 select 'xx',1""").collect
       sql(s"""alter table test1 drop columns (name)""").collect
       sql(s"""select name from test1""").collect
@@ -162,7 +162,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check delete column for measure column
   test("DeleteCol_001_02", Include) {
     intercept[Exception] {
-      sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
       sql(s"""insert into test1 select 'xx',1""").collect
       sql(s"""alter table test1 drop columns (id)""").collect
       sql(s"""select id from test1""").collect
@@ -174,7 +174,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check delete column for measure and dimension column
   test("DeleteCol_001_03", Include) {
     intercept[Exception] {
-      sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata'""").collect
+      sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata""").collect
       sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect
       sql(s"""alter table test1 drop columns (id,name)""").collect
       sql(s"""select id,name  from test1""").collect
@@ -186,7 +186,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check delete column for multiple column
   test("DeleteCol_001_04", Include) {
     intercept[Exception] {
-      sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata' """).collect
+      sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata """).collect
       sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect
       sql(s"""alter table test1 drop columns (name, upd_time)""").collect
       sql(s"""select name, upd_time from test1""").collect
@@ -197,7 +197,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Check delete column for all columns
   test("DeleteCol_001_05", Include) {
-    sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata'""").collect
+    sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata""").collect
     sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect
     sql(s"""alter table test1 drop columns (name, upd_time, country,id)""").collect
     sql(s"""drop table if exists test1""").collect
@@ -207,7 +207,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check delete column for include dictionary column
   test("DeleteCol_001_06", Include) {
     intercept[Exception] {
-      sql(s"""create table test1 (name string, id int) stored by 'carbondata' """).collect
+      sql(s"""create table test1 (name string, id int) STORED AS carbondata """).collect
       sql(s"""insert into test1 select 'xx',1""").collect
       sql(s"""alter table test1 drop columns (id)""").collect
       sql(s"""select id from test1""").collect
@@ -219,7 +219,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check delete column for timestamp column
   test("DeleteCol_001_08", Include) {
     intercept[Exception] {
-      sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata'""").collect
+      sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata""").collect
       sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect
       sql(s"""alter table test1 drop columns (upd_time)""").collect
       sql(s"""select upd_time from test1""").collect
@@ -230,7 +230,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
 
  //Check that dropping an added column removes the column from the table
   test("DeleteCol_001_09_1", Include) {
-     sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect
    sql(s"""alter table test1 add columns (name2 string)""").collect
    sql(s"""insert into test1 select 'xx','yy',current_timestamp,1,'abc'""").collect
@@ -243,7 +243,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
  //Check that dropping an added column removes the column from the table
   test("DeleteCol_001_09_2", Include) {
     intercept[Exception] {
-     sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata""").collect
      sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect
      sql(s"""alter table test1 add columns (name2 string)""").collect
      sql(s"""insert into test1 select 'xx','yy',current_timestamp,1,'abc'""").collect
@@ -256,7 +256,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Drop a column and add it again with a default value
   test("DeleteCol_001_10", Include) {
-     sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect
    sql(s"""alter table test1 drop columns (id)""").collect
    sql(s"""alter table test1 add columns (id bigint) tblproperties('default.value.id'='999')""").collect
@@ -269,7 +269,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Drop a column and add it again with a default value
   test("DeleteCol_001_11", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, country string, upd_time timestamp, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx','yy',current_timestamp,1""").collect
    sql(s"""alter table test1 drop columns (id)""").collect
    sql(s"""insert into test1 select 'a','china',current_timestamp""").collect
@@ -283,7 +283,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check add column for multiple column adds
   test("AddColumn_001_01", Include) {
      sql(s"""drop table if exists test1""").collect
-   sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+   sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (upd_time timestamp, country string)""").collect
     checkAnswer(s"""select upd_time, country from test1""",
@@ -295,7 +295,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check add column for dimension column and add table property to set default value
   test("AddColumn_001_02", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""insert into test1 select 'xx',12""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (country string) TBLPROPERTIES('DEFAULT.VALUE.country'='China')""").collect
@@ -308,7 +308,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check add column to add a measure column
   test("AddColumn_001_03", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (id1 int)""").collect
     checkAnswer(s"""select id1 from test1""",
@@ -320,7 +320,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check add column to add a measure column added with dictionary include
   test("AddColumn_001_04", Include) {
      sql(s"""drop table if exists test1""").collect
-   sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+   sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""insert into test1 select 'xx',11""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (id1 int) """).collect
@@ -333,7 +333,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check add column to add a measure column initialized with default value
   ignore("AddColumn_001_05", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""insert into test1 select 'xx',11""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (price decimal(10,6)) TBLPROPERTIES('DEFAULT.VALUE.price'='11.111')""").collect
@@ -346,7 +346,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
  //Check add column to add a measure column initialized with a default value which does not suit the data type
   test("AddColumn_001_06", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (price bigint) TBLPROPERTIES('DEFAULT.VALUE.Price'='1.1')""").collect
     checkAnswer(s"""select price from test1""",
@@ -358,7 +358,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
  //Check add column to add a measure column initialized with a default value on an empty table
   test("AddColumn_001_07", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (price bigint) TBLPROPERTIES('DEFAULT.VALUE.Price'='11')""").collect
     checkAnswer(s"""select count(id) from test1 where price = 11""",
@@ -370,7 +370,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check add column to add a dim and measure column
   test("AddColumn_001_08", Include) {
      sql(s"""drop table if exists test1""").collect
-   sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+   sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (id1 int, country string) """).collect
     checkAnswer(s"""select id1, country from test1""",
@@ -382,7 +382,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check add column for measure and make it dictionary column
   test("AddColumn_001_09", Include) {
      sql(s"""drop table if exists test1""").collect
-   sql(s"""create table test1 (name string) stored by 'carbondata'""").collect
+   sql(s"""create table test1 (name string) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx'""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (Id int)  """).collect
     checkAnswer(s"""select id from test1""",
@@ -394,7 +394,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check add column to add columns and exclude the dim col from dictionary
   test("AddColumn_001_10", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx'""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (upd_time timestamp, country string) """).collect
     checkAnswer(s"""select country, upd_time from test1""",
@@ -406,7 +406,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check add column to add a timestamp column
   test("AddColumn_001_11", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (upd_time timestamp)""").collect
     checkAnswer(s"""select upd_time from test1""",
@@ -419,7 +419,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   test("AddColumn_001_14", Include) {
     intercept[Exception] {
       sql(s"""drop table if exists test1""").collect
-      sql(s"""create table test1 (name string) stored by 'carbondata'""").collect
+      sql(s"""create table test1 (name string) STORED AS carbondata""").collect
       sql(s"""insert into test1 select 'xx'""").collect
       sql(s"""ALTER TABLE test1 ADD COLUMNS (Id int) TBLPROPERTIES('default.value.name'='yy')""").collect
     }
@@ -430,7 +430,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //check alter column for small decimal to big decimal
   test("AlterData_001_02", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, price decimal(3,2)) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, price decimal(3,2)) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1.2""").collect
    sql(s"""alter table test1 change price price decimal(10,7)""").collect
    sql(s"""insert into test1 select 'xx2',999.9999999""").collect
@@ -443,7 +443,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //check drop table after table rename using new name
   test("DropTable_001_01", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, price decimal(3,2)) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, price decimal(3,2)) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1.2""").collect
    sql(s"""alter table test1 rename to test2""").collect
     sql(s"""drop table test2""").collect
@@ -454,7 +454,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   test("DropTable_001_02", Include) {
     intercept[Exception] {
       sql(s"""drop table if exists test1""").collect
-      sql(s"""create table test1 (name string, price decimal(3,2)) stored by 'carbondata'""").collect
+      sql(s"""create table test1 (name string, price decimal(3,2)) STORED AS carbondata""").collect
       sql(s"""insert into test1 select 'xx',1.2""").collect
       sql(s"""alter table test1 rename to test2""").collect
       sql(s"""drop table test1""").collect
@@ -465,7 +465,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
 
   //check drop table after table rename using new name, after table load
   test("DropTable_001_03", Include) {
-     sql(s"""create table test1 (name string, price decimal(3,2)) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, price decimal(3,2)) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1.2""").collect
    sql(s"""alter table test1 rename to test2""").collect
    sql(s"""insert into test2 select 'yy',1""").collect
@@ -477,7 +477,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //check drop table after alter table name, using new name when table is empty
   test("DropTable_001_04", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, price decimal(3,2)) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, price decimal(3,2)) STORED AS carbondata""").collect
    sql(s"""alter table test1 rename to test2""").collect
     sql(s"""drop table test2""").collect
 
@@ -487,7 +487,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //check drop table when table is altered by adding columns
   test("DropTable_001_05", Include) {
      sql(s"""drop table if exists test1""").collect
-   sql(s"""create table test1 (name string, id int) stored by 'carbondata'  """).collect
+   sql(s"""create table test1 (name string, id int) STORED AS carbondata  """).collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (upd_time timestamp, country string) TBLPROPERTIES( 'DEFAULT.VALUE.country'='China')""").collect
    sql(s"""insert into test1 select 'yy',1,current_timestamp,'xx'""").collect
@@ -499,7 +499,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check schema changes and carbon dictionary additions for alter table when new column added
   test("StorageFi_001_02", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (country string, name string) stored by 'carbondata' """).collect
+     sql(s"""create table test1 (country string, name string) STORED AS carbondata """).collect
    sql(s"""insert into test1 select 'xx','uu'""").collect
     sql(s"""alter table test1 add columns (price decimal(10,4)) tblproperties('DEFAULT.VALUE.price'='11.111')""").collect
      sql(s"""drop table if exists test1""").collect
@@ -509,7 +509,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
  //Check dictionary cache is loaded with the newly added column when a query is run
   ignore("Dictionary_001_01", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, id decimal(3,2),country string) stored by 'carbondata' """).collect
+     sql(s"""create table test1 (name string, id decimal(3,2),country string) STORED AS carbondata """).collect
    sql(s"""insert into test1 select 'xx',1.22,'china'""").collect
    sql(s"""alter table test1 add columns (price decimal(10,4)) tblproperties('DEFAULT.VALUE.price'='11.111')""").collect
     checkAnswer(s"""select * from test1""",
@@ -521,7 +521,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check if dropped column is removed from driver side LRU cache
   test("Dictionary_001_02", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, id decimal(3,2),country string) stored by 'carbondata' """).collect
+     sql(s"""create table test1 (name string, id decimal(3,2),country string) STORED AS carbondata """).collect
    sql(s"""insert into test1 select 'xx',1.22,'china'""").collect
    sql(s"""alter table test1 drop columns (country)""").collect
     checkAnswer(s"""select * from test1""",
@@ -533,7 +533,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
  //Check if the dropped column is removed from the LRU cache at driver side
   test("Dictionary_001_03", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, id decimal(3,2),country string) stored by 'carbondata' """).collect
+     sql(s"""create table test1 (name string, id decimal(3,2),country string) STORED AS carbondata """).collect
    sql(s"""insert into test1 select 'xx',1.22,'china'""").collect
    sql(s"""alter table test1 drop columns(country)""").collect
     checkAnswer(s"""select * from test1""",
@@ -546,7 +546,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   test("Dataload_001_01", Include) {
      sql(s"""drop table if exists t_carbn01t""").collect
    sql(s"""drop table if exists t_carbn01""").collect
-   sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+   sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
    sql(s"""alter table t_carbn01 rename to t_carbn01t""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01t options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
@@ -557,7 +557,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check table load into old table after alter table name
   test("Dataload_001_02", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
-     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
    sql(s"""alter table t_carbn01 rename to t_carbn01t""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01t options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
@@ -570,7 +570,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
  //Check table load works fine after alter table change column
   test("Dataload_001_03", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
-     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
    sql(s"""alter table t_carbn01 change Profit Profit Decimal(10,4)""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
@@ -581,7 +581,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
  //Check table load works fine after alter table add columns
   test("Dataload_001_04", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
-     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
    sql(s"""alter table t_carbn01 add columns (item_code1 string, item_code2 string)""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive2.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date,item_code1, item_code2')""").collect
@@ -594,7 +594,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
  //Check table load works fine after alter table drop columns
   test("Dataload_001_05", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
-     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
    sql(s"""alter table t_carbn01 drop columns (Update_time, create_date)""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive2.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name')""").collect
@@ -607,7 +607,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check if alter table(add column) is supported when data load is happening
   test("Concurrent_alter_001_01", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
-     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
    sql(s"""alter table t_carbn01 add columns (item_code1 string, item_code2 string)""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive2.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date,item_code1,item_code2')""").collect
@@ -618,7 +618,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check if alter table(delete column) is supported when data load is happening
   test("Concurrent_alter_001_02", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
-     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
     sql(s"""alter table t_carbn01 drop columns (Update_time, create_date)""").collect
      sql(s"""drop table if exists t_carbn01""").collect
@@ -628,7 +628,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check if alter table(change column) is supported when data load is happening
   test("Concurrent_alter_001_03", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
-     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
     sql(s"""alter table t_carbn01 change Profit Profit Decimal(10,4)""").collect
      sql(s"""drop table if exists t_carbn01""").collect
@@ -638,7 +638,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check if alter table(rename) is supported when data load is happening
   test("Concurrent_alter_001_04", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
-     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
     sql(s"""alter table t_carbn01 rename to t_carbn01t""").collect
      sql(s"""drop table if exists t_carbn01t""").collect
@@ -649,9 +649,9 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   test("Insertint_001_03", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
     sql(s"""drop table if exists default.t_carbn02""").collect
-     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
-   sql(s"""create table default.t_carbn02(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+   sql(s"""create table default.t_carbn02(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""alter table t_carbn02 add columns (item_name1 string)""").collect
    sql(s"""insert into t_carbn02 select *, 'xxx' from t_carbn01""").collect
     sql(s"""Select count(*) from t_carbn02""").collect
@@ -665,9 +665,9 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   test("Insertint_001_04", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
     sql(s"""drop table if exists default.t_carbn02""").collect
-     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
-   sql(s"""create table default.t_carbn02(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+   sql(s"""create table default.t_carbn02(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""alter table t_carbn02 change Profit Profit decimal(10,4)""").collect
    sql(s"""insert into t_carbn02 select * from t_carbn01""").collect
     sql(s"""Select count(*) from t_carbn02""").collect
@@ -680,8 +680,8 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check table insert works fine after altering the table to drop columns
   test("Insertint_001_05", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test2 (country string, name string, state_id int,id int) stored by 'carbondata' """).collect
-   sql(s"""create table test1 (country string, state_id int) stored by 'carbondata' """).collect
+     sql(s"""create table test2 (country string, name string, state_id int,id int) STORED AS carbondata """).collect
+   sql(s"""create table test1 (country string, state_id int) STORED AS carbondata """).collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""alter table test2 drop columns (name, id)""").collect
    sql(s"""insert into test2 select * from test1""").collect
@@ -695,7 +695,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check show segments on the old table after altering the table name.
   test("Showsegme_001_01", Include) {
     intercept[Exception] {
-      sql(s"""create table test1 (country string, id int) stored by 'carbondata'""").collect
+      sql(s"""create table test1 (country string, id int) STORED AS carbondata""").collect
       sql(s"""alter table test1 rename to test2""").collect
       sql(s"""show segments for table test1""").collect
     }
@@ -707,7 +707,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   test("Compaction_001_01", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""drop table if exists test2""").collect
-   sql(s"""create table test1(name string, id int) stored by 'carbondata'""").collect
+   sql(s"""create table test1(name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""insert into test1 select 'xe',2""").collect
    sql(s"""insert into test1 select 'xr',3""").collect
@@ -721,7 +721,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   test("Compaction_001_02", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""drop table if exists test2""").collect
-   sql(s"""create table test1(name string, id int) stored by 'carbondata'""").collect
+   sql(s"""create table test1(name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""insert into test1 select 'xe',2""").collect
    sql(s"""insert into test1 select 'xr',3""").collect
@@ -737,7 +737,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   test("Compaction_001_03", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""drop table if exists test2""").collect
-   sql(s"""create table test1(name string, id int) stored by 'carbondata'""").collect
+   sql(s"""create table test1(name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""insert into test1 select 'xe',2""").collect
    sql(s"""alter table test1 rename to test2""").collect
@@ -754,7 +754,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
      sql(s"""drop table if exists test1""").collect
    sql(s"""drop table if exists test2""").collect
     sql(s"""drop table if exists test3""").collect
-   sql(s"""create table test1(name string, id int) stored by 'carbondata'""").collect
+   sql(s"""create table test1(name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""alter table test1 rename to test2""").collect
    sql(s"""insert into test2 select 'xe',2""").collect
@@ -771,7 +771,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   test("Compaction_001_05", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""drop table if exists test2""").collect
-   sql(s"""create table test1(name string, id int) stored by 'carbondata'""").collect
+   sql(s"""create table test1(name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""insert into test1 select 'xe',2""").collect
    sql(s"""alter table test1 rename to test2""").collect
@@ -788,7 +788,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
     intercept[Exception] {
       sql(s"""drop table if exists test1""").collect
       sql(s"""drop table if exists test2""").collect
-      sql(s"""create table test1(name string, country string, id int) stored by 'carbondata'""").collect
+      sql(s"""create table test1(name string, country string, id int) STORED AS carbondata""").collect
       sql(s"""insert into test1 select 'xx','china',1""").collect
       sql(s"""insert into test1 select 'xe','china',2""").collect
       sql(s"""insert into test1 select 'xe','china',3""").collect
@@ -805,7 +805,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
     intercept[Exception] {
       sql(s"""drop table if exists test1""").collect
       sql(s"""drop table if exists test2""").collect
-      sql(s"""create table test1(name string, country string, id int) stored by 'carbondata'""").collect
+      sql(s"""create table test1(name string, country string, id int) STORED AS carbondata""").collect
       sql(s"""insert into test1 select 'xx','china',1""").collect
       sql(s"""insert into test1 select 'xe','china',2""").collect
       sql(s"""alter table test1 drop columns (country)""").collect
@@ -822,7 +822,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
     intercept[Exception] {
       sql(s"""drop table if exists test1""").collect
       sql(s"""drop table if exists test2""").collect
-      sql(s"""create table test1(name string, country string, id int) stored by 'carbondata'""").collect
+      sql(s"""create table test1(name string, country string, id int) STORED AS carbondata""").collect
       sql(s"""insert into test1 select 'xx','china',1""").collect
       sql(s"""alter table test1 drop columns (country)""").collect
       sql(s"""insert into test1 select 'xe',3""").collect
@@ -839,7 +839,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   test("Compaction_001_09", Include) {
      sql(s"""drop table if exists test1""").collect
    sql(s"""drop table if exists test2""").collect
-   sql(s"""create table test1(name string) stored by 'carbondata'""").collect
+   sql(s"""create table test1(name string) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx3'""").collect
    sql(s"""insert into test1 select 'xx2'""").collect
    sql(s"""insert into test1 select 'xx1'""").collect
@@ -854,7 +854,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check vertical compaction on altered table for column add, when some of the segments were created before table alter. Ensure the added column is in the compacted segment
   test("Compaction_001_10", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1(name string) stored by 'carbondata'""").collect
+     sql(s"""create table test1(name string) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx1'""").collect
    sql(s"""insert into test1 select 'xx2'""").collect
    sql(s"""alter table test1 add columns (country string)""").collect
@@ -869,7 +869,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check vertical compaction on multiple altered table for column add, when some of the segments were created after table alter. Ensure the added column is in the compacted segment
   test("Compaction_001_11", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1(name string) stored by 'carbondata'""").collect
+     sql(s"""create table test1(name string) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx1'""").collect
    sql(s"""insert into test1 select 'xx2'""").collect
    sql(s"""alter table test1 add columns (id int)  """).collect
@@ -886,7 +886,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check vertical compaction on altered table for change column datatype, when some of the segments were created after table alter. Ensure the added column is in the compacted segment
   test("Compaction_001_12", Include) {
     sql(s"""drop table if exists default.test1""").collect
-     sql(s"""create table test1(name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1(name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx1',1""").collect
    sql(s"""insert into test1 select 'xx2',2""").collect
    sql(s"""alter table test1 change id id bigint """).collect
@@ -909,7 +909,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check bad record location is not changed when the table name is altered
   test("BadRecords_001_01", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
-     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1_Bad.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\','BAD_RECORDS_LOGGER_ENABLE'='true', 'BAD_RECORDS_ACTION'='REDIRECT', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
    sql(s"""alter table default.t_carbn01 rename to default.t_carbn01t""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1_Bad.csv' INTO table default.t_carbn01t options ('DELIMITER'=',', 'QUOTECHAR'='\','BAD_RECORDS_LOGGER_ENABLE'='true', 'BAD_RECORDS_ACTION'='REDIRECT', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
@@ -920,7 +920,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check bad record location is not changed when the table name is altered
   test("BadRecords_001_02", Include) {
     sql(s"""drop table if exists default.t_carbn01""").collect
-     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""create table default.t_carbn01(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1_Bad.csv' INTO table default.t_carbn01 options ('DELIMITER'=',', 'QUOTECHAR'='\','BAD_RECORDS_LOGGER_ENABLE'='true', 'BAD_RECORDS_ACTION'='REDIRECT', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
     sql(s"""alter table t_carbn01 drop columns (item_name)""").collect
      sql(s"""drop table if exists default.t_carbn01""").collect
@@ -930,7 +930,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check for bad record handling while altering the table, if the added column is set with a default value which is a bad record
   test("BadRecords_001_03", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""insert into test1 select 'xx',12""").collect
    sql(s"""ALTER TABLE test1 ADD COLUMNS (id2 int) TBLPROPERTIES('include_dictionary'='id2','DEFAULT.VALUE.id2'='China')""").collect
@@ -943,7 +943,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check delete segment is not allowed on old table name when table name is altered
   test("DeleteSeg_001_01", Include) {
     intercept[Exception] {
-      sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+      sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
       sql(s"""insert into test1 select 'xx',1""").collect
       sql(s"""insert into test1 select 'xx',12""").collect
       sql(s"""alter table test1 rename to test2""").collect
@@ -956,7 +956,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check delete segment is allowed on new table name when table name is altered
   test("DeleteSeg_001_02", Include) {
     sql(s"""drop table if exists test1""").collect
-     sql(s"""create table test1 (name string, id int) stored by 'carbondata'""").collect
+     sql(s"""create table test1 (name string, id int) STORED AS carbondata""").collect
    sql(s"""insert into test1 select 'xx',1""").collect
    sql(s"""insert into test1 select 'xx',12""").collect
    sql(s"""alter table test1 rename to test2""").collect
@@ -971,7 +971,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   test("AlterTable-001-AltersameTablename-001-TC001", Include) {
      sql(s"""drop table  if exists uniqdata""").collect
    sql(s"""drop table  if exists uniqdata1""").collect
-   sql(s"""CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' """).collect
+   sql(s"""CREATE TABLE uniqdata (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata """).collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
    sql(s"""alter table uniqdata RENAME TO  uniqdata1""").collect
    sql(s"""alter table uniqdata1 RENAME TO uniqdata""").collect
@@ -984,7 +984,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Check select query after altering int to bigint and decimal from lower to higher precision
   test("AlterTable-007-selectquery-001-TC002", Include) {
-     sql(s"""CREATE TABLE uniqdata1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' """).collect
+     sql(s"""CREATE TABLE uniqdata1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata """).collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata1 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
    sql(s"""ALTER TABLE uniqdata1 CHANGE CUST_ID CUST_ID BIGINT""").collect
     sql(s"""select * from uniqdata1 where cust_name like 'Cust%'""").collect
@@ -995,7 +995,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Check select query after altering from lower to higher precision
   test("AlterTable-008-selectquery-001-TC003", Include) {
-     sql(s"""CREATE TABLE uniqdata1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' """).collect
+     sql(s"""CREATE TABLE uniqdata1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata """).collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/2000_UniqData.csv' into table uniqdata1 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE', 'BAD_RECORDS_ACTION'='FORCE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
    sql(s"""ALTER TABLE uniqdata1 CHANGE decimal_column1 decimal_column1 DECIMAL(31,11)""").collect
     sql(s"""select * from uniqdata1 where cust_name like 'Cust%'""").collect
@@ -1007,7 +1007,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
   //Check add column on Decimal,Timestamp,int,string,Bigint
   test("AlterTable-002-001-TC-004", Include) {
      sql(s"""drop table if exists uniqdata59""").collect
-   sql(s"""CREATE TABLE uniqdata59 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'org.apache.carbondata.format' """).collect
+   sql(s"""CREATE TABLE uniqdata59 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata """).collect
     sql(s"""ALTER TABLE uniqdata59 ADD COLUMNS (a1 int,a2 int,a3 decimal,a4 Bigint,a5 String,a6 timestamp,a7 Bigint,a8 decimal(10,2),a9 timestamp,a10 String,a11 string,a12 string,a13 string,a14 string,a15 string,a16 string,a17 string,a18 string,a19 string,a20 string,a21 string,a22 string,a23 string,a24 string,a25 string,a26 string,a27 string,a28 string,a29 string,a30 string,a31 string,a32 string,a33 string,a34 string,a35 string,a36 string,a37 string,a38 string,a39 string,a40 string,a41 s [...]
      sql(s"""drop table  if exists uniqdata59""").collect
   }
@@ -1090,7 +1090,7 @@ class AlterTableTestCase extends QueryTest with BeforeAndAfterAll {
 
   test("Test drop columns not present in the table") {
     sql("drop table if exists test1")
-    sql("create table test1(col1 int) stored by 'carbondata'")
+    sql("create table test1(col1 int) STORED AS carbondata")
     val exception = intercept[ProcessMetaDataException] {
       sql("alter table test1 drop columns(name)")
     }
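
  Every hunk in this file makes the same mechanical substitution: the Hive-handler clause STORED BY 'org.apache.carbondata.format' (or STORED BY 'carbondata') becomes STORED AS carbondata. A minimal sketch of the migrated pattern, assuming a SparkSession built with a spark.sql.extensions entry (the extension class name, session config, table and column names below are illustrative, not taken from the patch):

    import org.apache.spark.sql.SparkSession

    val spark = SparkSession.builder()
      .master("local[2]")
      .appName("StoredAsCarbondataSketch")
      // assumed extension class name; registers Carbon's parser and rules
      .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
      .getOrCreate()

    // the form the updated tests use: STORED AS carbondata
    spark.sql("CREATE TABLE demo_t1 (name STRING, id INT) STORED AS carbondata")
    spark.sql("INSERT INTO demo_t1 SELECT 'xx', 1")
    spark.sql("SELECT * FROM demo_t1").show()
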
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BadRecordTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BadRecordTestCase.scala
index 84d6304..90bb7a5 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BadRecordTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BadRecordTestCase.scala
@@ -31,7 +31,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   
   //Create table and Load history data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true from CSV without header and specify headers in command
   test("BadRecords-001_PTS001_TC001", Include) {
-     sql(s"""CREATE TABLE badrecordtest1 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE badrecordtest1 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test2.csv' into table badrecordtest1 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='ID,CUST_ID,cust_name')""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test2.csv' into table badrecordtest1 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='ID,CUST_ID,cust_name')""").collect
     checkAnswer(s"""select count(*) from badrecordTest1""",
@@ -42,7 +42,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Create table and Load history data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true from CSV with  header and specify header in command
   test("BadRecords-001_PTS002_TC001", Include) {
-     sql(s"""CREATE TABLE badrecordtest2 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE badrecordtest2 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test3.csv' into table badrecordtest2 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='ID,CUST_ID,cust_name')""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test3.csv' into table badrecordtest2 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='ID,CUST_ID,cust_name')""").collect
     checkAnswer(s"""select count(*) from badrecordtest2""",
@@ -53,7 +53,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Create table and Load history data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true from CSV with  header and without specify header in command
   test("BadRecords-001_PTS003_TC001", Include) {
-     sql(s"""CREATE TABLE badrecordtest3 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE badrecordtest3 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test3.csv' into table badrecordtest3 OPTIONS('FILEHEADER'='ID,CUST_ID,cust_name','DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test3.csv' into table badrecordtest3 OPTIONS('FILEHEADER'='ID,CUST_ID,cust_name','DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
     checkAnswer(s"""select count(*) from badrecordtest3""",
@@ -64,7 +64,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Create table and load the data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true with CSV has incomplete/wrong data
   test("BadRecords-001_PTS004_TC001", Include) {
-     sql(s"""CREATE TABLE badrecordtest4 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE badrecordtest4 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test4.csv' into table badrecordtest4 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test4.csv' into table badrecordtest4 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
     checkAnswer(s"""select count(*) from badrecordtest4""",
@@ -75,7 +75,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Create table and load data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true for data types with boundary values of data type
   test("BadRecords-001_PTS005_TC001", Include) {
-     sql(s"""CREATE TABLE badrecordtest5 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE badrecordtest5 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test5.csv' into table badrecordtest5 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test5.csv' into table badrecordtest5 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
     checkAnswer(s"""select count(*) from badrecordtest5""",
@@ -87,7 +87,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   //Create table and load history data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE, BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true from CSV with custom delimiters and quote characters
   test("BadRecords-001_PTS006_TC001", Include) {
     sql(s"""drop table if exists abadrecordtest1""").collect
-    sql(s"""CREATE TABLE abadrecordtest1 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""CREATE TABLE abadrecordtest1 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test6.csv' into table abadrecordtest1 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'="'",'is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
       checkAnswer(
         s"""select count(*) from abadrecordtest1""",
@@ -98,7 +98,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Create the table and load the data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE, BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true, with column values containing separators (/, \, !, \001)
   test("BadRecords-001_PTS007_TC001", Include) {
-     sql(s"""CREATE TABLE badrecordtest6 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE badrecordtest6 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
     intercept[Exception] {
       sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/qoute1.csv' into table badrecordtest6 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='/','is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE')""").collect
       sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/qoute3.csv' into table badrecordtest6 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='\','is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE')""").collect
@@ -115,7 +115,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   test("BadRecords-001_PTS008_TC001", Include) {
     sql(s"""drop table if exists badrecordTest7""").collect
     sql(s"""drop table if exists hivetable7""").collect
-     sql(s"""CREATE TABLE badrecordtest7 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE badrecordtest7 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""CREATE TABLE hivetable7 (ID int,CUST_ID int,cust_name string) row format delimited fields terminated by ','""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test2_hive1.csv' into table hivetable7""").collect
    sql(s"""insert into table badrecordtest7 select * from hivetable7""").collect
@@ -130,7 +130,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   test("BadRecords-001_PTS015_TC001", Include) {
     sql(s"""drop table if exists badrecordTest9""").collect
     sql(s"""drop table if exists hivetable9""").collect
-     sql(s"""CREATE TABLE badrecordTest9 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE badrecordTest9 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""CREATE TABLE hivetable9 (ID int,CUST_ID int,cust_name string) row format delimited fields terminated by ','""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test2_hive2.csv' into table hivetable9""").collect
    sql(s"""insert into table badrecordTest9 select * from hivetable9""").collect
@@ -143,7 +143,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Show segments for table when data loading having parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true
   test("BadRecords-001_PTS020_TC001", Include) {
-     sql(s"""CREATE TABLE badrecordTest13 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format' """).collect
+     sql(s"""CREATE TABLE badrecordTest13 (ID int,CUST_ID int,cust_name string) STORED AS carbondata """).collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test3.csv' into table badrecordTest13 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='ID,CUST_ID,cust_name')""").collect
     sql(s"""SHOW SEGMENTS FOR TABLE badrecordTest13""").collect
      sql(s"""drop table if exists badrecordTest13""").collect
@@ -152,7 +152,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Create table and Load data with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE,BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true  for date and char types using vectorized reader parameters
   test("BadRecords-001_PTS012_TC001", Include) {
-     sql(s"""CREATE TABLE badrecordtest14 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE badrecordtest14 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test3.csv' into table badrecordtest14 OPTIONS('FILEHEADER'='ID,CUST_ID,cust_name','DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE')""").collect
     checkAnswer(s"""select count(*) from badrecordTest14""",
       Seq(Row(3)), "BadRecordTestCase-BadRecords-001_PTS012_TC001")
@@ -162,7 +162,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Check the data load with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE, BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true, data having "" (empty value in double quotes)
   test("BadRecords-001_PTS021_TC001", Include) {
-     sql(s"""CREATE TABLE badrecordtest15 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE badrecordtest15 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/doubleqoute.csv' into table badrecordtest15 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
     checkAnswer(s"""select count(*) from badrecordTest15""",
       Seq(Row(1)), "BadRecordTestCase-BadRecords-001_PTS021_TC001")
@@ -173,7 +173,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
   //Check the data load with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE, BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true, data having insufficient columns
   test("BadRecords-001_PTS022_TC001", Include) {
     sql(s"""drop table if exists badrecordTest16""").collect
-     sql(s"""CREATE TABLE badrecordtest16 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE badrecordtest16 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/insuffcient.csv' into table badrecordtest16 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE')""").collect
     checkAnswer(s"""select count(*) from badrecordTest16""",
       Seq(Row(2)), "BadRecordTestCase-BadRecords-001_PTS022_TC001")
@@ -183,7 +183,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Check the data load with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE, BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true, data having '' (empty value in single quotes)
   test("BadRecords-001_PTS023_TC001", Include) {
-     sql(s"""CREATE TABLE badrecordtest17 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE badrecordtest17 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/test6.csv' into table badrecordtest17 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'="'",'is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
     checkAnswer(s"""select count(*) from badrecordTest17""",
       Seq(Row(3)), "BadRecordTestCase-BadRecords-001_PTS023_TC001")
@@ -193,7 +193,7 @@ class BadRecordTestCase extends QueryTest with BeforeAndAfterAll {
 
   //Check the data load with parameters BAD_RECORDS_ACTION=FAIL/FORCE/REDIRECT/IGNORE, BAD_RECORD_LOGGER_ENABLE=true/false and IS_EMPTY_DATA_BAD_RECORD=false/true, data having , (an empty field between commas)
   test("BadRecords-001_PTS024_TC001", Include) {
-     sql(s"""CREATE TABLE badrecordtest18 (ID int,CUST_ID int,cust_name string) STORED BY 'org.apache.carbondata.format'""").collect
+     sql(s"""CREATE TABLE badrecordtest18 (ID int,CUST_ID int,cust_name string) STORED AS carbondata""").collect
    sql(s"""LOAD DATA INPATH '$resourcesPath/Data/badrecord/emptyComma.csv' into table badrecordtest18 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','is_empty_data_bad_record'='false','BAD_RECORDS_ACTION'='IGNORE','BAD_RECORDS_LOGGER_ENABLE'='TRUE')""").collect
     checkAnswer(s"""select count(*) from badrecordTest18""",
       Seq(Row(1)), "BadRecordTestCase-BadRecords-001_PTS024_TC001")
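
  The loads in this file exercise the bad-records options in many combinations; they compose in a single LOAD DATA statement roughly as in the sketch below (table name, path and option values are illustrative; `spark` is assumed to be a Carbon-enabled session as in the earlier sketch):

    spark.sql("CREATE TABLE bad_demo (id INT, cust_id INT, cust_name STRING) STORED AS carbondata")
    // BAD_RECORDS_ACTION takes FAIL, FORCE, REDIRECT or IGNORE
    spark.sql(
      """LOAD DATA INPATH '/tmp/demo/bad.csv' INTO TABLE bad_demo
        |OPTIONS('DELIMITER'=',', 'QUOTECHAR'='"',
        |        'FILEHEADER'='id,cust_id,cust_name',
        |        'BAD_RECORDS_LOGGER_ENABLE'='true',
        |        'BAD_RECORDS_ACTION'='IGNORE',
        |        'IS_EMPTY_DATA_BAD_RECORD'='false')""".stripMargin)
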
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BloomFilterDataMapTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BloomFilterDataMapTestCase.scala
index f03d0cf..ac8bc11 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BloomFilterDataMapTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BloomFilterDataMapTestCase.scala
@@ -66,7 +66,7 @@ class BloomFilterDataMapTestCase extends QueryTest with BeforeAndAfterEach with
          |    stringLocalDictField string,
          |    longStringField string
          | )
-         | STORED BY 'carbondata'
+         | STORED AS carbondata
          | TBLPROPERTIES(
          |  'LONG_STRING_COLUMNS'='longStringField',
          |  'SORT_COLUMNS'='stringSortField',
@@ -148,7 +148,7 @@ class BloomFilterDataMapTestCase extends QueryTest with BeforeAndAfterEach with
          |    stringLocalDictField string,
          |    longStringField string
          | )
-         | STORED BY 'carbondata'
+         | STORED AS carbondata
          | TBLPROPERTIES(
          |  'LONG_STRING_COLUMNS'='longStringField',
          |  'local_dictionary_enable'='true',
@@ -186,8 +186,8 @@ class BloomFilterDataMapTestCase extends QueryTest with BeforeAndAfterEach with
     val dataMapName = "bloom_datamap"
     sql(s"DROP TABLE IF EXISTS $normalTable")
     sql(s"DROP TABLE IF EXISTS $bloomDMSampleTable")
-    sql(s"CREATE TABLE $normalTable(c1 string, c2 int, c3 string) STORED BY 'carbondata'")
-    sql(s"CREATE TABLE $bloomDMSampleTable(c1 string, c2 int, c3 string) STORED BY 'carbondata'")
+    sql(s"CREATE TABLE $normalTable(c1 string, c2 int, c3 string) STORED AS carbondata")
+    sql(s"CREATE TABLE $bloomDMSampleTable(c1 string, c2 int, c3 string) STORED AS carbondata")
     // load data with empty value
     sql(s"INSERT INTO $normalTable SELECT '', 1, 'xxx'")
     sql(s"INSERT INTO $bloomDMSampleTable SELECT '', 1, 'xxx'")
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BucketingTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BucketingTestCase.scala
index 92b94d0..e42b008 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BucketingTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/BucketingTestCase.scala
@@ -43,7 +43,7 @@ class BucketingTestCase extends QueryTest with BeforeAndAfterAll {
     intercept[Exception] {
       sql("DROP TABLE IF EXISTS bucket_table")
       sql("CREATE TABLE bucket_table (ID Int, date Timestamp, country String, name String, phonetype String," +
-          "serialname String, salary Int) STORED BY 'carbondata' TBLPROPERTIES " +
+          "serialname String, salary Int) STORED AS carbondata TBLPROPERTIES " +
           "('BUCKETNUMBER'='4', 'BUCKETCOLUMNS'='ID')")
     }
   }
@@ -52,7 +52,7 @@ class BucketingTestCase extends QueryTest with BeforeAndAfterAll {
     intercept[Exception] {
       sql("DROP TABLE IF EXISTS bucket_table")
       sql("CREATE TABLE bucket_table (Id int, number double, name string, " +
-          "gamePoint array<double>, mac struct<num:double>) STORED BY 'carbondata' TBLPROPERTIES" +
+          "gamePoint array<double>, mac struct<num:double>) STORED AS carbondata TBLPROPERTIES" +
           "('BUCKETNUMBER'='4', 'BUCKETCOLUMNS'='gamePoint')")
     }
   }
@@ -60,7 +60,7 @@ class BucketingTestCase extends QueryTest with BeforeAndAfterAll {
   test("test multi columns as bucketcolumns") {
     sql("DROP TABLE IF EXISTS bucket_table")
     sql("CREATE TABLE bucket_table (ID Int, date Timestamp, country String, name String, phonetype String," +
-        "serialname String, salary Int) STORED BY 'carbondata' TBLPROPERTIES " +
+        "serialname String, salary Int) STORED AS carbondata TBLPROPERTIES " +
         "('BUCKETNUMBER'='4', 'BUCKETCOLUMNS'='name,phonetype')")
     sql(s"LOAD DATA INPATH '$resourcesPath/source.csv' INTO TABLE bucket_table")
     val table: CarbonTable = CarbonMetadata.getInstance().getCarbonTable("default_bucket_table")
@@ -74,7 +74,7 @@ class BucketingTestCase extends QueryTest with BeforeAndAfterAll {
   test("test multi columns as bucketcolumns with bucket join") {
     sql("DROP TABLE IF EXISTS bucket_table")
     sql("CREATE TABLE bucket_table (ID Int, date Timestamp, country String, name String, phonetype String," +
-        "serialname String, salary Int) STORED BY 'carbondata' TBLPROPERTIES " +
+        "serialname String, salary Int) STORED AS carbondata TBLPROPERTIES " +
         "('BUCKETNUMBER'='4', 'BUCKETCOLUMNS'='country,name')")
     sql(s"LOAD DATA INPATH '$resourcesPath/source.csv' INTO TABLE bucket_table")
 
@@ -94,7 +94,7 @@ class BucketingTestCase extends QueryTest with BeforeAndAfterAll {
   test("test non bucket column join") {
     sql("DROP TABLE IF EXISTS bucket_table")
     sql("CREATE TABLE bucket_table (ID Int, date Timestamp, country String, name String, phonetype String," +
-        "serialname String, salary Int) STORED BY 'carbondata' TBLPROPERTIES " +
+        "serialname String, salary Int) STORED AS carbondata TBLPROPERTIES " +
         "('BUCKETNUMBER'='4', 'BUCKETCOLUMNS'='country')")
     sql(s"LOAD DATA INPATH '$resourcesPath/source.csv' INTO TABLE bucket_table")
 
@@ -115,7 +115,7 @@ class BucketingTestCase extends QueryTest with BeforeAndAfterAll {
   test("test bucketcolumns through multi data loading plus compaction") {
     sql("DROP TABLE IF EXISTS bucket_table")
     sql("CREATE TABLE bucket_table (ID Int, date Timestamp, country String, name String, phonetype String," +
-        "serialname String, salary Int) STORED BY 'carbondata' TBLPROPERTIES " +
+        "serialname String, salary Int) STORED AS carbondata TBLPROPERTIES " +
         "('BUCKETNUMBER'='4', 'BUCKETCOLUMNS'='name')")
     val numOfLoad = 10
     for (j <- 0 until numOfLoad) {
@@ -139,7 +139,7 @@ class BucketingTestCase extends QueryTest with BeforeAndAfterAll {
   test("drop non-bucket column, test bucket column join") {
     sql("DROP TABLE IF EXISTS bucket_table")
     sql("CREATE TABLE bucket_table (ID Int, date Timestamp, country String, name String, phonetype String," +
-        "serialname String, salary Int) STORED BY 'carbondata' TBLPROPERTIES " +
+        "serialname String, salary Int) STORED AS carbondata TBLPROPERTIES " +
         "('BUCKETNUMBER'='4', 'BUCKETCOLUMNS'='name')")
     sql(s"LOAD DATA INPATH '$resourcesPath/source.csv' INTO TABLE bucket_table")
 
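
  Bucketing is untouched by the syntax migration because it lives entirely in TBLPROPERTIES; only the storage clause changes, as in this sketch (table name, columns and bucket count are illustrative; `spark` is assumed as above):

    spark.sql(
      """CREATE TABLE bucket_demo (id INT, country STRING, name STRING)
        |STORED AS carbondata
        |TBLPROPERTIES('BUCKETNUMBER'='4', 'BUCKETCOLUMNS'='name')""".stripMargin)
    spark.sql("LOAD DATA INPATH '/tmp/demo/source.csv' INTO TABLE bucket_demo")
    // equi-joins on the bucket column (name) can then reuse the bucketing
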
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ComplexDataTypeTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ComplexDataTypeTestCase.scala
index 723ecf5..3a211e2 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ComplexDataTypeTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/ComplexDataTypeTestCase.scala
@@ -73,7 +73,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS test")
     sql(
       "create table test(person struct<detail:struct<id:int,name:string,height:double," +
-      "status:boolean,dob:date,dobt:timestamp>>) stored by 'carbondata'")
+      "status:boolean,dob:date,dobt:timestamp>>) STORED AS carbondata")
     sql("insert into test values(named_struct('detail', named_struct('id', 1, 'name', 'abc', 'height', 4.30, 'status', true, 'dob', '2017-08-09', 'dobt', '2017-08-09 00:00:00.0')))")
     checkAnswer(sql("select * from test"),
       Seq(Row(Row(Row(1, "abc", 4.3, true, java.sql.Date.valueOf("2017-08-09"),
@@ -81,7 +81,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS test")
     sql(
       "create table test(p1 array<int>,p2 array<string>,p3 array<double>,p4 array<boolean>,p5 " +
-      "array<date>,p6 array<timestamp>) stored by 'carbondata'")
+      "array<date>,p6 array<timestamp>) STORED AS carbondata")
     sql("insert into test values(array(1,2,3), array('abc','def','mno'), array(4.30,4.60,5.20), array(true,true,false), array('2017-08-09','2017-08-09','2017-07-07'), array('2017-08-09 00:00:00.0','2017-08-09 00:00:00.0','2017-07-07 00:00:00.0'))")
     checkAnswer(sql("select * from test"),
       Seq(Row(mutable.WrappedArray.make(Array(1, 2, 3)),
@@ -112,7 +112,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
         "ActiveProvince:string, Activecity:string, ActiveDistrict:string, ActiveStreet:string>>," +
         "proddate struct<productionDate:string,activeDeactivedate:array<string>>, gamePointId " +
         "double,contractNumber double) " +
-        "STORED BY 'org.apache.carbondata.format'")
+        "STORED AS carbondata")
     sql(
       s"LOAD DATA local inpath '$filePath/complexdata.csv' INTO table " +
       "complexcarbontable " +
@@ -138,7 +138,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS test")
     sql(
       "create table test(person struct<detail:struct<id:int,name:string,height:double," +
-      "status:boolean,dob:date,dobt:timestamp>>) stored by 'carbondata' ")
+      "status:boolean,dob:date,dobt:timestamp>>) STORED AS carbondata ")
     sql("insert into test values(named_struct('detail', named_struct('id', 1, 'name', 'abc', 'height', 4.30, 'status', true, 'dob', '2017-08-09', 'dobt', '2017-08-09 00:00:00.0')))")
     checkAnswer(sql("select * from test"),
       Seq(Row(Row(Row(1,
@@ -147,7 +147,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS test")
     sql(
       "create table test(p1 array<int>,p2 array<string>,p3 array<double>,p4 array<boolean>,p5 " +
-      "array<date>,p6 array<timestamp>) stored by 'carbondata' ")
+      "array<date>,p6 array<timestamp>) STORED AS carbondata ")
     sql("insert into test values(array(1,2,3), array('abc','def','mno'), array(4.30,4.60,5.20), array(true,true,false), array('2017-08-09','2017-08-09','2017-07-07'), array('2017-08-09 00:00:00.0','2017-08-09 00:00:00.0','2017-07-07 00:00:00.0'))")
     checkAnswer(sql("select * from test"),
       Seq(Row(mutable.WrappedArray.make(Array(1, 2, 3)),
@@ -178,7 +178,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
         "ActiveProvince:string, Activecity:string, ActiveDistrict:string, ActiveStreet:string>>," +
         "proddate struct<productionDate:string,activeDeactivedate:array<string>>, gamePointId " +
         "double,contractNumber double) " +
-        "STORED BY 'org.apache.carbondata.format'")
+        "STORED AS carbondata")
     sql(
       s"LOAD DATA local inpath '$filePath/complexdata.csv' INTO table " +
       "complexcarbontable " +
@@ -187,7 +187,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
       "'COMPLEX_DELIMITER_LEVEL_1'='$', 'COMPLEX_DELIMITER_LEVEL_2'=':')")
     checkAnswer(sql("select count(*) from complexcarbontable"), Seq(Row(100)))
     sql("DROP TABLE IF EXISTS test")
-    sql("create table test stored by 'carbondata' as select * from complexcarbontable")
+    sql("create table test STORED AS carbondata as select * from complexcarbontable")
     checkAnswer(sql("select count(*) from test"), Seq(Row(100)))
   }
 
@@ -196,10 +196,8 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS complexcarbontable")
     sql(
       "create table complexcarbontable (roll int,a struct<b:int,c:string,d:int,e:string," +
-      "f:struct<g:int," +
-      "h:string,i:int>,j:int>) stored " +
-      "by " +
-      "'carbondata'")
+      "f:struct<g:int,h:string,i:int>,j:int>) " +
+      "STORED AS carbondata")
     sql("insert into complexcarbontable values(1, named_struct('b', 1, 'c', 'abc', 'd', 2, 'e', 'efg', 'f', named_struct('g', 3, 'h', 'mno', 'i', 4), 'j', 5))")
     sql("insert into complexcarbontable values(2, named_struct('b', 1, 'c', 'abc', 'd', 2, 'e', 'efg', 'f', named_struct('g', 3, 'h', 'mno', 'i', 4), 'j', 5))")
     sql("insert into complexcarbontable values(3, named_struct('b', 1, 'c', 'abc', 'd', 2, 'e', 'efg', 'f', named_struct('g', 3, 'h', 'mno', 'i', 4), 'j', 5))")
@@ -241,7 +239,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
   // check create table with complex datatype columns and insert into table and apply filters
   test("test Complex_DataType-006") {
     sql("DROP TABLE IF EXISTS test")
-    sql("create table test(id int,a struct<b:int,c:int>) stored by 'carbondata'")
+    sql("create table test(id int,a struct<b:int,c:int>) STORED AS carbondata")
     sql("insert into test values(1, named_struct('b', 2, 'c', 3))")
     sql("insert into test values(3, named_struct('b', 5, 'c', 3))")
     sql("insert into test values(2, named_struct('b', 4, 'c', 5))")
@@ -261,7 +259,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
         "ActiveProvince:string, Activecity:string, ActiveDistrict:string, ActiveStreet:string>>," +
         "proddate struct<productionDate:string,activeDeactivedate:array<string>>, gamePointId " +
         "double,contractNumber double) " +
-        "STORED BY 'org.apache.carbondata.format'")
+        "STORED AS carbondata")
     sql(
       s"LOAD DATA local inpath '$filePath/complexdata.csv' INTO table " +
       "complexcarbontable " +
@@ -276,7 +274,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
         "ActiveProvince:string, Activecity:string, ActiveDistrict:string, ActiveStreet:string>>," +
         "proddate struct<productionDate:string,activeDeactivedate:array<string>>, gamePointId " +
         "double,contractNumber double) " +
-        "STORED BY 'org.apache.carbondata.format'")
+        "STORED AS carbondata")
     sql("insert overwrite table test select * from complexcarbontable")
     checkAnswer(sql("select count(*) from test"), Seq(Row(100)))
   }
@@ -287,7 +285,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
     sql(
       "create table complexcarbontable(roll int, student struct<id:int,name:string," +
       "marks:array<int>>) " +
-      "stored by 'carbondata'")
+      "STORED AS carbondata")
     sql("insert into complexcarbontable values(1, named_struct('id', 1, 'name', 'abc', 'marks', array(1,null,null)))")
     checkAnswer(sql("select * from complexcarbontable"),
       Seq(Row(1, Row(1, "abc", mutable.WrappedArray.make(Array(1, null, null))))))
@@ -299,13 +297,13 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
     sql(
       "create table complexcarbontable(struct_dbl struct<double1:double,double2:double," +
       "double3:double>) " +
-      "stored by 'carbondata'")
+      "STORED AS carbondata")
     sql("insert into complexcarbontable values(named_struct('double1', 10000000, 'double2', 300000, 'double3', 3000))")
     checkExistence(sql("select * from complexcarbontable"), true, "1.0E7,300000.0,3000.0")
     sql("Drop table if exists complexcarbontable")
     sql(
-      "create table complexcarbontable(struct_arr struct<array_db1:array<double>>) stored by " +
-      "'carbondata'")
+      "create table complexcarbontable(struct_arr struct<array_db1:array<double>>) " +
+      "STORED AS carbondata")
     sql("insert into complexcarbontable values(named_struct('array_db1', array(5555555.9559,12345678991234567,3444.999)))")
     checkExistence(sql("select * from complexcarbontable"),
       true,
@@ -358,7 +356,7 @@ class ComplexDataTypeTestCase extends QueryTest with BeforeAndAfterAll {
     writer.close()
     sql("DROP TABLE IF EXISTS sdkOutputTable")
     sql(
-      s"""CREATE EXTERNAL TABLE sdkOutputTable STORED BY 'carbondata' LOCATION
+      s"""CREATE EXTERNAL TABLE sdkOutputTable STORED AS carbondata LOCATION
          |'$writerPath' """.stripMargin)
 
     checkAnswer(sql("select * from sdkOutputTable"), Seq(Row("abcde", 34, Row(100.0))))
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableAsSelectTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableAsSelectTestCase.scala
index 0e52f85..659205a 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableAsSelectTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableAsSelectTestCase.scala
@@ -34,9 +34,9 @@ class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
   //Check create table as select with select from same table name when table exists
   test("CreateTableAsSelect_001_01", Include) {
    sql("drop table if exists ctas_same_table_name").collect
-   sql("CREATE TABLE ctas_same_table_name(key INT, value STRING) STORED by 'carbondata'").collect
+   sql("CREATE TABLE ctas_same_table_name(key INT, value STRING) STORED AS carbondata").collect
    intercept[Exception] {
-     sql("create table ctas_same_table_name stored by 'carbondata' as select * from ctas_same_table_name")
+     sql("create table ctas_same_table_name STORED AS carbondata as select * from ctas_same_table_name")
    }
   }
 
@@ -44,57 +44,57 @@ class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
   test("CreateTableAsSelect_001_02", Include) {
     sql("drop table if exists ctas_same_table_name").collect
     intercept[Exception] {
-      sql("create table ctas_same_table_name stored by 'carbondata' as select * from ctas_same_table_name")
+      sql("create table ctas_same_table_name STORED AS carbondata as select * from ctas_same_table_name")
     }
   }
 
   //Check create table as select with select from same table name with if not exists clause
   test("CreateTableAsSelect_001_03", Include) {
     sql("drop table if exists ctas_same_table_name").collect
-    sql("CREATE TABLE ctas_same_table_name(key INT, value STRING) STORED by 'carbondata'").collect
-    sql("create table if not exists ctas_same_table_name stored by 'carbondata' as select * from ctas_same_table_name").collect
+    sql("CREATE TABLE ctas_same_table_name(key INT, value STRING) STORED AS carbondata").collect
+    sql("create table if not exists ctas_same_table_name STORED AS carbondata as select * from ctas_same_table_name").collect
     assert(true)
   }
 
   //Check create table as select with select from another carbon table
   test("CreateTableAsSelect_001_04", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_carbon").collect
-    sql("create table ctas_select_carbon stored by 'carbondata' as select * from carbon_ctas_test").collect
+    sql("create table ctas_select_carbon STORED AS carbondata as select * from carbon_ctas_test").collect
     checkAnswer(sql("select * from ctas_select_carbon"), sql("select * from carbon_ctas_test"))
   }
 
   //Check create table as select with select from another parquet table
   test("CreateTableAsSelect_001_05", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_parquet").collect
-    sql("create table ctas_select_parquet stored by 'carbondata' as select * from parquet_ctas_test").collect
+    sql("create table ctas_select_parquet STORED AS carbondata as select * from parquet_ctas_test").collect
     checkAnswer(sql("select * from ctas_select_parquet"), sql("select * from parquet_ctas_test"))
   }
 
   //Check test create table as select with select from another hive/orc table
   test("CreateTableAsSelect_001_06", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_orc").collect
-    sql("create table ctas_select_orc stored by 'carbondata' as select * from orc_ctas_test").collect
+    sql("create table ctas_select_orc STORED AS carbondata as select * from orc_ctas_test").collect
     checkAnswer(sql("select * from ctas_select_orc"), sql("select * from orc_ctas_test"))
   }
 
   //Check create table as select with where clause in select from carbon table that returns data
   test("CreateTableAsSelect_001_07", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_where_carbon").collect
-    sql("create table ctas_select_where_carbon stored by 'carbondata' as select * from carbon_ctas_test where key=100").collect
+    sql("create table ctas_select_where_carbon STORED AS carbondata as select * from carbon_ctas_test where key=100").collect
     checkAnswer(sql("select * from ctas_select_where_carbon"), sql("select * from carbon_ctas_test where key=100"))
   }
 
   //Check create table as select with where clause in select from carbon table that does not return data
   test("CreateTableAsSelect_001_08", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_where_carbon").collect
-    sql("create table ctas_select_where_carbon stored by 'carbondata' as select * from carbon_ctas_test where key=300").collect
+    sql("create table ctas_select_where_carbon STORED AS carbondata as select * from carbon_ctas_test where key=300").collect
     checkAnswer(sql("select * from ctas_select_where_carbon"), sql("select * from carbon_ctas_test where key=300"))
   }
 
   //Check create table as select with where clause in select from carbon table and load again
   test("CreateTableAsSelect_001_09", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_where_carbon").collect
-    sql("create table ctas_select_where_carbon stored by 'carbondata' as select * from carbon_ctas_test where key=100").collect
+    sql("create table ctas_select_where_carbon STORED AS carbondata as select * from carbon_ctas_test where key=100").collect
     sql("insert into ctas_select_where_carbon select 200,'hive'").collect
     checkAnswer(sql("select * from ctas_select_where_carbon"), sql("select * from carbon_ctas_test"))
   }
@@ -102,21 +102,21 @@ class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
   //Check create table as select with where clause in select from parquet table
   test("CreateTableAsSelect_001_10", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_where_parquet").collect
-    sql("create table ctas_select_where_parquet stored by 'carbondata' as select * from parquet_ctas_test where key=100").collect
+    sql("create table ctas_select_where_parquet STORED AS carbondata as select * from parquet_ctas_test where key=100").collect
     checkAnswer(sql("select * from ctas_select_where_parquet"), sql("select * from parquet_ctas_test where key=100"))
   }
 
   //Check create table as select with where clause in select from hive/orc table
   test("CreateTableAsSelect_001_11", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_where_orc").collect
-    sql("create table ctas_select_where_orc stored by 'carbondata' as select * from orc_ctas_test where key=100").collect
+    sql("create table ctas_select_where_orc STORED AS carbondata as select * from orc_ctas_test where key=100").collect
     checkAnswer(sql("select * from ctas_select_where_orc"), sql("select * from orc_ctas_test where key=100"))
   }
 
   //Check create table as select with select directly having the data
   test("CreateTableAsSelect_001_12", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_direct_data").collect
-    sql("create table ctas_select_direct_data stored by 'carbondata' as select 300,'carbondata'").collect
+    sql("create table ctas_select_direct_data STORED AS carbondata as select 300,'carbondata'").collect
     checkAnswer(sql("select * from ctas_select_direct_data"), Seq(Row(300,"carbondata")))
   }
 
@@ -124,9 +124,9 @@ class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
   test("CreateTableAsSelect_001_13", Include) {
     sql("DROP TABLE IF EXISTS ctas_select_hugedata1").collect
     sql("DROP TABLE IF EXISTS ctas_select_hugedata2").collect
-    sql(s"""CREATE TABLE ctas_select_hugedata1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED BY 'carbondata'""").collect
+    sql(s"""CREATE TABLE ctas_select_hugedata1 (CUST_ID int,CUST_NAME String,ACTIVE_EMUI_VERSION string, DOB timestamp, DOJ timestamp, BIGINT_COLUMN1 bigint,BIGINT_COLUMN2 bigint,DECIMAL_COLUMN1 decimal(30,10), DECIMAL_COLUMN2 decimal(36,10),Double_COLUMN1 double, Double_COLUMN2 double,INTEGER_COLUMN1 int) STORED AS carbondata""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/uniqdata/7000_UniqData.csv' into table ctas_select_hugedata1 OPTIONS('DELIMITER'=',' , 'QUOTECHAR'='"','COMMENTCHAR'='#','MULTILINE'='true','ESCAPECHAR'='\','BAD_RECORDS_ACTION'='REDIRECT','BAD_RECORDS_LOGGER_ENABLE'='TRUE','FILEHEADER'='CUST_ID,CUST_NAME,ACTIVE_EMUI_VERSION,DOB,DOJ,BIGINT_COLUMN1,BIGINT_COLUMN2,DECIMAL_COLUMN1,DECIMAL_COLUMN2,Double_COLUMN1,Double_COLUMN2,INTEGER_COLUMN1')""").collect
-    sql("create table ctas_select_hugedata2 stored by 'carbondata' as select * from ctas_select_hugedata1").collect
+    sql("create table ctas_select_hugedata2 STORED AS carbondata as select * from ctas_select_hugedata1").collect
     checkAnswer(sql("select * from ctas_select_hugedata1"), sql("select * from ctas_select_hugedata2"))
     sql("DROP TABLE IF EXISTS ctas_select_hugedata1").collect
     sql("DROP TABLE IF EXISTS ctas_select_hugedata2").collect
@@ -138,7 +138,7 @@ class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
     sql(
       """
         | CREATE TABLE ctas_select_where_parquet
-        | STORED BY 'carbondata'
+        | STORED AS carbondata
         | AS SELECT * FROM parquet_ctas_test
         | WHERE key=300
       """.stripMargin).collect
@@ -152,7 +152,7 @@ class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
     sql(
       """
         | CREATE TABLE ctas_select_where_orc
-        | STORED BY 'carbondata'
+        | STORED AS carbondata
         | AS SELECT * FROM orc_ctas_test
         | WHERE key=100
       """.stripMargin).collect
@@ -167,7 +167,7 @@ class CreateTableAsSelectTestCase extends QueryTest with BeforeAndAfterAll {
    sql("DROP TABLE IF EXISTS orc_ctas_test")
 
    // create carbon table and insert data
-   sql("CREATE TABLE carbon_ctas_test(key INT, value STRING) STORED by 'carbondata'")
+   sql("CREATE TABLE carbon_ctas_test(key INT, value STRING) STORED AS carbondata")
    sql("insert into carbon_ctas_test select 100,'spark'")
    sql("insert into carbon_ctas_test select 200,'hive'")
 
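
Note: the CTAS cases above change only the storage clause; the AS SELECT part of each
statement is untouched. A short sketch in the style of these tests (it assumes QueryTest
scope, where sql, checkAnswer, Row, and Seq are in scope; the table names are illustrative):

    sql("CREATE TABLE ctas_src(key INT, value STRING) STORED AS carbondata")
    sql("INSERT INTO ctas_src SELECT 100, 'spark'")
    // CTAS with the new clause; the WHERE filter is optional
    sql("CREATE TABLE ctas_dst STORED AS carbondata AS SELECT * FROM ctas_src WHERE key = 100")
    checkAnswer(sql("SELECT * FROM ctas_dst"), Seq(Row(100, "spark")))
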
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableWithLocalDictionaryTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableWithLocalDictionaryTestCase.scala
index 70325ff..5186699 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableWithLocalDictionaryTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/CreateTableWithLocalDictionaryTestCase.scala
@@ -33,7 +33,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
       """.stripMargin)
 
     val desc_result = sql("describe formatted local1")
@@ -49,7 +49,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='name')
       """.
         stripMargin)
@@ -68,7 +68,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='')
         """.
           stripMargin)
@@ -85,7 +85,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='abc')
         """.
           stripMargin)
@@ -102,7 +102,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='id')
         """.
           stripMargin)
@@ -120,7 +120,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='name')
         """.
           stripMargin)
@@ -132,7 +132,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='20000')
       """.stripMargin)
 
@@ -151,7 +151,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='-100')
       """.stripMargin)
 
@@ -167,7 +167,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='21474874811')
       """.stripMargin)
 
@@ -183,7 +183,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='')
       """.stripMargin)
 
@@ -199,7 +199,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='hello')
       """.stripMargin)
 
@@ -216,7 +216,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='20000','local_dictionary_include'='name')
       """.stripMargin)
 
@@ -239,7 +239,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='-100','local_dictionary_include'='name')
       """.stripMargin)
 
@@ -262,7 +262,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='','local_dictionary_include'='name')
       """.stripMargin)
 
@@ -285,7 +285,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='vdslv','local_dictionary_include'='name')
       """.stripMargin)
 
@@ -309,7 +309,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='20000','local_dictionary_include'='name,name')
         """.stripMargin)
     }
@@ -323,7 +323,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='20000','local_dictionary_include'=' ')
         """.stripMargin)
     }
@@ -337,7 +337,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='20000','local_dictionary_include'='hello')
         """.stripMargin)
     }
@@ -351,7 +351,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='20000','local_dictionary_include'='name')
         """.stripMargin)
     }
@@ -365,7 +365,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='','local_dictionary_include'='name,name')
         """.stripMargin)
     }
@@ -379,7 +379,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='-100','local_dictionary_include'='Hello')
         """.stripMargin)
     }
@@ -393,7 +393,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='23213497321591234324',
           | 'local_dictionary_include'='name')
         """.stripMargin)
@@ -405,7 +405,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='true')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='true')
       """.stripMargin)
 
     val desc_result = sql("describe formatted local1")
@@ -424,7 +424,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_include'='name','local_dictionary_enable'='true')
       """.
         stripMargin)
@@ -445,7 +445,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='name,name')
         """.stripMargin)
     }
@@ -457,7 +457,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='')
         """.
           stripMargin)
@@ -475,7 +475,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='abc')
         """.
           stripMargin)
@@ -492,7 +492,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='id')
         """.
           stripMargin)
@@ -510,7 +510,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true',
           | 'local_dictionary_include'='name')
         """.
@@ -523,7 +523,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_exclude'='name','local_dictionary_enable'='true')
       """.
         stripMargin)
@@ -544,7 +544,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_exclude'='name,name')
         """.stripMargin)
     }
@@ -556,7 +556,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_exclude'='')
         """.
           stripMargin)
@@ -574,7 +574,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_exclude'='abc')
         """.
           stripMargin)
@@ -591,7 +591,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_exclude'='id')
         """.
           stripMargin)
@@ -609,7 +609,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true',
           | 'local_dictionary_exclude'='name')
         """.
@@ -625,7 +625,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_exclude'='name','local_dictionary_include'='city',
         | 'local_dictionary_enable'='true')
       """.
@@ -651,7 +651,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int,add string)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_exclude'='name','local_dictionary_include'='city','sort_columns'='add',
         | 'local_dictionary_enable'='true')
       """.
@@ -678,7 +678,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_exclude'='name','local_dictionary_include'='city',
         | 'local_dictionary_enable'='false')
       """.
@@ -701,7 +701,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_exclude'='name','local_dictionary_include'='city',
           | 'local_dictionary_enable'='true')
         """.
@@ -718,7 +718,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='name,city',
           | 'local_dictionary_exclude'='name')
         """.
@@ -735,7 +735,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       """
         | CREATE TABLE local1(id int, name string, city string, age int,st struct<s_id:int,
         | s_name:string,s_city:array<string>>)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_exclude'='name','local_dictionary_include'='city,st',
         | 'local_dictionary_enable'='true')
       """.
@@ -758,7 +758,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       """
         | CREATE TABLE local1(id int, name string, city string, age int,st array<struct<s_id:int,
         | s_name:string>>)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_exclude'='name','local_dictionary_include'='city,st',
         | 'local_dictionary_enable'='true')
       """.
@@ -777,7 +777,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='20000')
       """.stripMargin)
 
@@ -796,7 +796,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='-100')
       """.stripMargin)
 
@@ -812,7 +812,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='21474874811')
       """.stripMargin)
 
@@ -828,7 +828,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='')
       """.stripMargin)
 
@@ -844,7 +844,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='hello')
       """.stripMargin)
 
@@ -862,7 +862,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='20000',
         | 'local_dictionary_include'='name')
       """.stripMargin)
@@ -887,7 +887,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='-100',
         | 'local_dictionary_include'='name')
       """.stripMargin)
@@ -912,7 +912,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='',
         | 'local_dictionary_include'='name')
       """.stripMargin)
@@ -937,7 +937,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='vdslv',
         | 'local_dictionary_include'='name')
       """.stripMargin)
@@ -963,7 +963,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='20000',
           | 'local_dictionary_include'='name,name')
         """.stripMargin)
@@ -979,7 +979,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='20000',
           | 'local_dictionary_include'=' ')
         """.stripMargin)
@@ -995,7 +995,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='20000',
           | 'local_dictionary_include'='hello')
         """.stripMargin)
@@ -1011,7 +1011,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='20000',
           | 'local_dictionary_include'='name' )
         """.stripMargin)
@@ -1027,7 +1027,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='',
           | 'local_dictionary_include'='name,name')
         """.stripMargin)
@@ -1043,7 +1043,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='-100',
           | 'local_dictionary_include'='Hello')
         """.stripMargin)
@@ -1059,7 +1059,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       sql(
         """
           | CREATE TABLE local1(id int, name string, city string, age int)
-          | STORED BY 'org.apache.carbondata.format'
+          | STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true',
           | 'local_dictionary_threshold'='23213497321591234324','local_dictionary_include'='name')
         """.stripMargin)
@@ -1071,7 +1071,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
 
     val desc_result = sql("describe formatted local1")
@@ -1087,7 +1087,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_include'='name','local_dictionary_enable'='false')
       """.
         stripMargin)
@@ -1103,7 +1103,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_include'='name,name')
       """.stripMargin)
     val descFormatted1 = sql("describe formatted local1").collect
@@ -1117,7 +1117,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_include'='')
       """.
         stripMargin)
@@ -1132,7 +1132,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_include'='abc')
       """.
         stripMargin)
@@ -1147,7 +1147,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_include'='id')
       """.
         stripMargin)
@@ -1162,7 +1162,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false' ,
         | 'local_dictionary_include'='name')
       """.
@@ -1178,7 +1178,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='20000')
       """.stripMargin)
 
@@ -1194,7 +1194,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='-100')
       """.stripMargin)
 
@@ -1210,7 +1210,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='21474874811')
       """.stripMargin)
 
@@ -1226,7 +1226,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='')
       """.stripMargin)
 
@@ -1242,7 +1242,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='hello')
       """.stripMargin)
 
@@ -1260,7 +1260,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='20000',
         | 'local_dictionary_include'='name')
       """.stripMargin)
@@ -1279,7 +1279,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='-100',
         | 'local_dictionary_include'='name')
       """.stripMargin)
@@ -1298,7 +1298,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='',
         | 'local_dictionary_include'='name')
       """.stripMargin)
@@ -1317,7 +1317,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='vdslv',
         | 'local_dictionary_include'='name')
       """.stripMargin)
@@ -1337,7 +1337,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='20000',
         | 'local_dictionary_include'='name,name')
       """.stripMargin)
@@ -1355,7 +1355,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='20000',
         | 'local_dictionary_include'=' ')
       """.stripMargin)
@@ -1374,7 +1374,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='20000',
         | 'local_dictionary_include'='hello')
       """.stripMargin)
@@ -1393,7 +1393,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='20000',
         | 'local_dictionary_include'='name' )
       """.stripMargin)
@@ -1412,7 +1412,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='',
         | 'local_dictionary_include'='name,name')
       """.stripMargin)
@@ -1431,7 +1431,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false','local_dictionary_threshold'='-100',
         | 'local_dictionary_include'='Hello')
       """.stripMargin)
@@ -1450,7 +1450,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('local_dictionary_enable'='false',
         | 'local_dictionary_threshold'='23213497321591234324','local_dictionary_include'='name')
       """.stripMargin)
@@ -1467,7 +1467,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties(
         | 'sort_scope'='global_sort',
         | 'sort_columns'='city,name')
@@ -1488,7 +1488,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties('sort_scope'='local_sort',
         | 'sort_columns'='city,name')
       """.stripMargin)
@@ -1507,7 +1507,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties(
         | 'sort_scope'='no_sort',
         | 'sort_columns'='city,name')
@@ -1527,7 +1527,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format'
+        | STORED AS carbondata
         | tblproperties(
         | 'sort_scope'='local_sort',
         | 'sort_columns'='city,name')
@@ -1549,11 +1549,11 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     sql(
       """
-        | create table local1 stored by 'carbondata' tblproperties('local_dictionary_enable'='true') as
+        | create table local1 STORED AS carbondata tblproperties('local_dictionary_enable'='true') as
         | select * from local
       """.stripMargin)
 
@@ -1574,11 +1574,11 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     sql(
       """
-        | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+        | CREATE TABLE local1 STORED AS carbondata
         | tblproperties('local_dictionary_include'='name','local_dictionary_enable'='true')
         | as select * from local
       """.
@@ -1600,12 +1600,12 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     intercept[MalformedCarbonCommandException] {
       sql(
         """
-          | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+          | CREATE TABLE local1 STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='name,name')
           | as select * from local
         """.stripMargin)
@@ -1619,12 +1619,12 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     val exception = intercept[MalformedCarbonCommandException] {
       sql(
         """
-          | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+          | CREATE TABLE local1 STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='')
           | as select * from local
         """.
@@ -1643,12 +1643,12 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     val exception1 = intercept[MalformedCarbonCommandException] {
       sql(
         """
-          | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+          | CREATE TABLE local1 STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='abc')
           | as select * from local
         """.
@@ -1666,12 +1666,12 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     val exception = intercept[MalformedCarbonCommandException] {
       sql(
         """
-          | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+          | CREATE TABLE local1 STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='id')
           | as select * from local
         """.
@@ -1690,12 +1690,12 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     intercept[MalformedCarbonCommandException] {
       sql(
         """
-          | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+          | CREATE TABLE local1 STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true' ,
           | 'local_dictionary_include'='name') as select * from local
         """.
@@ -1709,11 +1709,11 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     sql(
       """
-        | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+        | CREATE TABLE local1 STORED AS carbondata
         | tblproperties('local_dictionary_exclude'='name','local_dictionary_enable'='true')
         | as select * from local
       """.
@@ -1734,12 +1734,12 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     intercept[MalformedCarbonCommandException] {
       sql(
         """
-          | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+          | CREATE TABLE local1 STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_exclude'='name,name')
           | as select * from local
         """.stripMargin)
@@ -1752,12 +1752,12 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     val exception = intercept[MalformedCarbonCommandException] {
       sql(
         """
-          | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+          | CREATE TABLE local1 STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_exclude'='')
           | as select * from local
         """.
@@ -1775,12 +1775,12 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     val exception1 = intercept[MalformedCarbonCommandException] {
       sql(
         """
-          | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+          | CREATE TABLE local1 STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_exclude'='abc')
           | as select * from local
         """.
@@ -1798,12 +1798,12 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     val exception = intercept[MalformedCarbonCommandException] {
       sql(
         """
-          | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+          | CREATE TABLE local1 STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_exclude'='id')
           | as select * from local
         """.
@@ -1822,12 +1822,12 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     intercept[MalformedCarbonCommandException] {
       sql(
         """
-          | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+          | CREATE TABLE local1 STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true' ,
           | 'local_dictionary_exclude'='name') as select * from local
         """.
@@ -1844,11 +1844,11 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     sql(
       """
-        | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+        | CREATE TABLE local1 STORED AS carbondata
         | tblproperties('local_dictionary_exclude'='name','local_dictionary_include'='city',
         | 'local_dictionary_enable'='true') as select * from local
       """.
@@ -1876,11 +1876,11 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     sql(
       """
-        | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+        | CREATE TABLE local1 STORED AS carbondata
         | tblproperties('local_dictionary_exclude'='name','local_dictionary_include'='city',
         | 'local_dictionary_enable'='false') as select * from local
       """.
@@ -1903,12 +1903,12 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     intercept[MalformedCarbonCommandException] {
       sql(
         """
-          | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+          | CREATE TABLE local1 STORED AS carbondata
           | tblproperties('local_dictionary_exclude'='name','local_dictionary_include'='city',
           | 'local_dictionary_enable'='true') as select * from local
         """.
@@ -1925,12 +1925,12 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     intercept[MalformedCarbonCommandException] {
       sql(
         """
-          | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+          | CREATE TABLE local1 STORED AS carbondata
           | tblproperties('local_dictionary_enable'='true','local_dictionary_include'='name,city',
           | 'local_dictionary_exclude'='name') as select * from local
         """.
@@ -1948,11 +1948,11 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       """
         | CREATE TABLE local(id int, name string, city string, age int,st struct<s_id:int,
         | s_name:string,s_city:array<string>>)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='true')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='true')
       """.stripMargin)
     sql(
       """
-        | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+        | CREATE TABLE local1 STORED AS carbondata
         | tblproperties('local_dictionary_exclude'='name','local_dictionary_include'='city,st',
         | 'local_dictionary_enable'='false') as select * from local
       """.
@@ -1969,11 +1969,11 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     sql(
       """
-        | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+        | CREATE TABLE local1 STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='20000')
         | as select * from local
       """.stripMargin)
@@ -1994,11 +1994,11 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     sql(
       """
-        | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+        | CREATE TABLE local1 STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='-100')
         | as select * from local
       """.stripMargin)
@@ -2016,11 +2016,11 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='false')
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='false')
       """.stripMargin)
     sql(
       """
-        | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+        | CREATE TABLE local1 STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='23589714365172595')
         | as select * from local
       """.stripMargin)
@@ -2042,7 +2042,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
       """.stripMargin)
     sql(
       """
-        | CREATE TABLE local1 STORED BY 'org.apache.carbondata.format'
+        | CREATE TABLE local1 STORED AS carbondata
         | tblproperties('local_dictionary_enable'='true','local_dictionary_threshold'='20000','local_dictionary_include'='city')
         | as select * from local
       """.stripMargin)
@@ -2066,7 +2066,7 @@ class CreateTableWithLocalDictionaryTestCase extends QueryTest with BeforeAndAft
     sql(
       """
         | CREATE TABLE local1(id int, name string, city string, age int)
-        | STORED BY 'org.apache.carbondata.format' tblproperties('local_dictionary_enable'='true',
+        | STORED AS carbondata tblproperties('local_dictionary_enable'='true',
         | 'local_dictionary_threshold'='20000','local_dictionary_include'='city','no_inverted_index'='name')
       """.stripMargin)
 
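The mechanical change running through every hunk above is the CREATE TABLE storage clause: the legacy storage-handler syntax STORED BY 'org.apache.carbondata.format' becomes the standard STORED AS carbondata form accepted by the new extension parser. A minimal before/after sketch, assuming `spark` is a SparkSession created with the Carbon extension enabled (the table and column names here are illustrative only, not taken from the diff):

    // Old handler syntax, previously routed through CarbonSqlAstBuilder:
    //   CREATE TABLE t1(id INT) STORED BY 'org.apache.carbondata.format'
    // Equivalent DDL after this change:
    spark.sql(
      """
        | CREATE TABLE t1(id INT, name STRING)
        | STORED AS carbondata
        | TBLPROPERTIES('local_dictionary_enable'='true')
      """.stripMargin)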
diff --git a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingIUDTestCase.scala b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingIUDTestCase.scala
index 952b90b..d6efd46 100644
--- a/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingIUDTestCase.scala
+++ b/integration/spark-common-cluster-test/src/test/scala/org/apache/carbondata/cluster/sdv/generated/DataLoadingIUDTestCase.scala
@@ -62,7 +62,7 @@ class DataLoadingIUDTestCase extends QueryTest with BeforeAndAfterAll with Befor
     sql("drop table if exists t_carbn01b").collect
     sql("drop table if exists T_Hive1").collect
     sql("drop table if exists T_Hive6").collect
-    sql(s"""create table default.t_carbn01b(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""create table default.t_carbn01b(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
     sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1.csv' INTO table default.t_carbn01B options ('DELIMITER'=',', 'QUOTECHAR'='\', 'FILEHEADER'='Active_status,Item_type_cd,Qty_day_avg,Qty_total,Sell_price,Sell_pricep,Discount_price,Profit,Item_code,Item_name,Outlet_name,Update_time,Create_date')""").collect
 
   }
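Each of the IUD tests below follows the same fixture pattern: drop and recreate the target table with STORED AS carbondata, seed it from the staging table t_carbn01b, apply an UPDATE, then verify through checkAnswer. A condensed sketch of that pattern, assuming the harness's checkAnswer(sqlText, expectedRows, testName) overload used throughout this file (the column list is trimmed and the expected row is a placeholder, not a recorded result):

 test("IUD-pattern-sketch", Include) {
   sql(s"""drop table IF EXISTS default.t_sketch""").collect
   sql(s"""create table default.t_sketch (Active_status String, Profit DECIMAL(3,2)) STORED AS carbondata""").collect
   sql(s"""insert into default.t_sketch select active_status, profit from default.t_carbn01b""").collect
   sql(s"""update default.t_sketch set (active_status) = ('YES') where active_status = 'TRUE'""").collect
   checkAnswer(s"""select active_status from default.t_sketch group by active_status""",
     Seq(Row("YES")), "IUD-pattern-sketch")  // placeholder expectation, not a recorded result
 }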
@@ -84,7 +84,7 @@ test("IUD-01-01-01_001-001", Include) {
  sql(s"""LOAD DATA INPATH '$resourcesPath/Data/InsertData/T_Hive1_hive10.csv' overwrite into table T_Hive1""").collect
  sql("create table T_Hive6(Item_code STRING, Sub_item_cd ARRAY<string>)row format delimited fields terminated by ',' collection items terminated by '$'")
  sql(s"""load data inpath '$resourcesPath/Data/InsertData/T_Hive1_hive11.csv' overwrite into table T_Hive6""").collect
- sql(s"""create table t_carbn02(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table t_carbn02(Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into t_carbn02 select * from default.t_carbn01b limit 4""").collect
   checkAnswer(s"""select count(*) from t_carbn01b""",
     Seq(Row(10)), "DataLoadingIUDTestCase_IUD-01-01-01_001-001")
@@ -94,7 +94,7 @@ test("IUD-01-01-01_001-001", Include) {
 
 //Check for update Carbon table using a data value
 test("IUD-01-01-01_001-01", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (active_status, profit) = ('YES',1) where active_status = 'TRUE'""").collect
   checkAnswer(s"""select active_status,profit from default.t_carbn01  where active_status='YES' group by active_status,profit""",
@@ -106,7 +106,7 @@ test("IUD-01-01-01_001-01", Include) {
//Check for update Carbon table using a data value on a string column where it was updated before
 test("IUD-01-01-01_001-02", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (active_status) = ('YES') where active_status = 'TRUE'""").collect
  sql(s"""update default.t_carbn01  set (active_status) = ('NO') where active_status = 'YES'""").collect
@@ -120,7 +120,7 @@ test("IUD-01-01-01_001-02", Include) {
 test("IUD-01-01-01_001-03", Include) {
   intercept[Exception] {
    sql(s"""drop table IF EXISTS default.t_carbn01""").collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (active_status) = (NO) """).collect
     sql(s"""NA""").collect
@@ -133,7 +133,7 @@ test("IUD-01-01-01_001-03", Include) {
//Check for update Carbon table using a data value on a string column using a numeric value
 test("IUD-01-01-01_001-04", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (active_status) = (234530508098098098080)""").collect
   checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
@@ -145,7 +145,7 @@ test("IUD-01-01-01_001-04", Include) {
//Check for update Carbon table using a data value on a string column using a numeric value in single quotes
 test("IUD-01-01-01_001-05", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (active_status) = ('234530508098098098080')""").collect
   checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
@@ -157,7 +157,7 @@ test("IUD-01-01-01_001-05", Include) {
//Check for update Carbon table using a data value on a string column using a decimal value
 test("IUD-01-01-01_001-06", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (active_status) = (2.55860986095689088)""").collect
   checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
@@ -169,7 +169,7 @@ test("IUD-01-01-01_001-06", Include) {
//Check for update Carbon table using a data value on a string column using a decimal value
 test("IUD-01-01-01_001-07", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (active_status) = ('2.55860986095689088')""").collect
   checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
@@ -181,7 +181,7 @@ test("IUD-01-01-01_001-07", Include) {
//Check for update Carbon table using a data value on a string column using a string value that contains special characters
 test("IUD-01-01-01_001-11", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (active_status) = ('fdfdskflksdf#?…..fdffs')""").collect
   checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
@@ -193,7 +193,7 @@ test("IUD-01-01-01_001-11", Include) {
//Check for update Carbon table using a data value on a string column using an array value containing ')'
 //test("IUD-01-01-01_001-12", Include) {
 //   sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-// sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+// sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
 // sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
 // sql(s"""update default.t_carbn01  set (active_status) = ('abd$asjdh$adasj$l;sdf$*)$*)(&^)')""").collect
 //  checkAnswer(s"""select count(*) from t_carbn01b""",
@@ -206,7 +206,7 @@ test("IUD-01-01-01_001-11", Include) {
 test("IUD-01-01-01_001-14", Include) {
   intercept[Exception] {
     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
     sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
     sql(s"""update default.t_carbn01  set (item_status_cd)  = ('10')""").collect
     sql(s"""NA""").collect
@@ -218,7 +218,7 @@ test("IUD-01-01-01_001-14", Include) {
 //Check for update Carbon table for a numeric value column
 test("IUD-01-01-01_001-15", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (item_type_cd)  = (10)""").collect
   checkAnswer(s"""select item_type_cd from default.t_carbn01  group by item_type_cd""",
@@ -230,7 +230,7 @@ test("IUD-01-01-01_001-15", Include) {
//Check for update Carbon table for a numeric value column in single quotes
 test("IUD-01-01-01_001-16", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (item_type_cd)  = ('10')""").collect
   checkAnswer(s"""select item_type_cd from default.t_carbn01  group by item_type_cd""",
@@ -243,7 +243,7 @@ test("IUD-01-01-01_001-16", Include) {
 test("IUD-01-01-01_001-17", Include) {
   intercept[Exception] {
     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
     sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
     sql(s"""update default.t_carbn01  set (item_type_cd)  = ('Orange')""").collect
     sql(s"""NA""").collect
@@ -256,7 +256,7 @@ test("IUD-01-01-01_001-17", Include) {
 test("IUD-01-01-01_001-18", Include) {
   intercept[Exception] {
     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
     sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
     sql(s"""update default.t_carbn01  set (item_type_cd)  = ('10.11')""").collect
     sql(s"""NA""").collect
@@ -268,7 +268,7 @@ test("IUD-01-01-01_001-18", Include) {
//Check for update Carbon table for a numeric Int value column using a large numeric value
 test("IUD-01-01-01_001-19", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (item_type_cd)  = (2147483647)""").collect
   checkAnswer(s"""select item_type_cd from default.t_carbn01  group by item_type_cd""",
@@ -280,7 +280,7 @@ test("IUD-01-01-01_001-19", Include) {
//Check for update Carbon table for a numeric Int value column using a large negative numeric value
 test("IUD-01-01-01_001-20", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (item_type_cd)  = (-2147483648)""").collect
   checkAnswer(s"""select item_type_cd from default.t_carbn01  group by item_type_cd""",
@@ -293,7 +293,7 @@ test("IUD-01-01-01_001-20", Include) {
 test("IUD-01-01-01_001-21", Include) {
   intercept[Exception] {
     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
     sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
     sql(s"""update default.t_carbn01  set (item_type_cd)  = (-2147483649)""").collect
     sql(s"""NA""").collect
@@ -305,7 +305,7 @@ test("IUD-01-01-01_001-21", Include) {
//Check for update Carbon table for a numeric BigInt value column using a large numeric value at the boundary of 64 bits
 test("IUD-01-01-01_001-22", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (sell_price)  = (9223372036854775807)""").collect
   checkAnswer(s"""select sell_price from default.t_carbn01  group by sell_price""",
@@ -317,7 +317,7 @@ test("IUD-01-01-01_001-22", Include) {
//Check for update Carbon table for a decimal value column using a decimal value
 test("IUD-01-01-01_001-23", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (profit) = (1.11)""").collect
   checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
@@ -329,7 +329,7 @@ test("IUD-01-01-01_001-23", Include) {
//Check for update Carbon table for a decimal value column using a decimal value in quotes
 test("IUD-01-01-01_001-24", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (profit)  = ('1.11')""").collect
   checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
@@ -341,7 +341,7 @@ test("IUD-01-01-01_001-24", Include) {
//Check for update Carbon table for a decimal value column using a numeric value
 test("IUD-01-01-01_001-25", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (profit)  = (1)""").collect
   checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
@@ -353,7 +353,7 @@ test("IUD-01-01-01_001-25", Include) {
//Check for update Carbon table for a decimal value column (3,2) using a numeric value greater than the allowed precision
 test("IUD-01-01-01_001-26", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (profit)  = (10)""").collect
   checkAnswer(s"""select count(Active_status) from default.t_carbn01 where profit = 10 """,
@@ -366,7 +366,7 @@ test("IUD-01-01-01_001-26", Include) {
 test("IUD-01-01-01_001-27", Include) {
   intercept[Exception] {
     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
     sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
     sql(s"""update default.t_carbn01  set (profit)  = ('hakshk')""").collect
     sql(s"""NA""").collect
@@ -378,7 +378,7 @@ test("IUD-01-01-01_001-27", Include) {
//Check for update Carbon table for a decimal value(3,2) column using a decimal value which has 1 decimal place
 test("IUD-01-01-01_001-28", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (profit)  = ('1.1')""").collect
   checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
@@ -390,7 +390,7 @@ test("IUD-01-01-01_001-28", Include) {
//Check for update Carbon table for a decimal value(3,2) column using a decimal value which has 3 decimal places
 test("IUD-01-01-01_001-29", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (profit)  = ('1.118')""").collect
   checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
@@ -402,7 +402,7 @@ test("IUD-01-01-01_001-29", Include) {
//Check for update Carbon table for a double column using a decimal value which has 3 decimal places
 test("IUD-01-01-01_001-30", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set (sell_pricep)  = ('10.1116756')""").collect
   checkAnswer(s"""select sell_pricep from default.t_carbn01  group by sell_pricep""",
@@ -414,7 +414,7 @@ test("IUD-01-01-01_001-30", Include) {
//Check for update Carbon table for a timestamp value column using a date timestamp
 test("IUD-01-01-01_001-31", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set(update_time) = ('2016-11-04 18:13:59.113')""").collect
   checkAnswer(s"""select update_time from default.t_carbn01  group by update_time""",
@@ -427,7 +427,7 @@ test("IUD-01-01-01_001-31", Include) {
 test("IUD-01-01-01_001-35", Include) {
   intercept[Exception] {
     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
     sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
     sql(s"""update default.t_carbn01  set(update_time) = ('04-11-20004 18:13:59.113')""").collect
     sql(s"""NA""").collect
@@ -440,7 +440,7 @@ test("IUD-01-01-01_001-35", Include) {
 test("IUD-01-01-01_001-32", Include) {
   intercept[Exception] {
     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
     sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
     sql(s"""update default.t_carbn01  set(update_time) = ('fhjfhjfdshf')""").collect
     sql(s"""NA""").collect
@@ -453,7 +453,7 @@ test("IUD-01-01-01_001-32", Include) {
 test("IUD-01-01-01_001-33", Include) {
   intercept[Exception] {
     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
     sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
     sql(s"""update default.t_carbn01  set(update_time) = (56546)""").collect
     sql(s"""NA""").collect
@@ -466,7 +466,7 @@ test("IUD-01-01-01_001-33", Include) {
 test("IUD-01-01-01_001-34", Include) {
   intercept[Exception] {
     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
     sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
     sql(s"""update default.t_carbn01  set(update_time) = ('2016-11-04')""").collect
     sql(s"""NA""").collect
@@ -479,7 +479,7 @@ test("IUD-01-01-01_001-34", Include) {
 test("IUD-01-01-01_001-36", Include) {
   intercept[Exception] {
     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
     sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
     sql(s"""update default.t_carbn01  set(update_time) = ('2016-11-04 18:63:59.113')""").collect
     sql(s"""NA""").collect
@@ -491,7 +491,7 @@ test("IUD-01-01-01_001-36", Include) {
//Check for update Carbon table for a timestamp value column using a date timestamp
 test("IUD-01-01-01_001-37", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set(update_time) = ('2016-11-04 18:13:59.113435345345433 ')""").collect
   checkAnswer(s"""select update_time from default.t_carbn01  group by update_time""",
@@ -503,7 +503,7 @@ test("IUD-01-01-01_001-37", Include) {
 //Check update Carbon table using a * operation on a column value
 test("IUD-01-01-01_001-40", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set(profit, item_type_cd)= (profit*1.2, item_type_cd*3)""").collect
   checkAnswer(s"""select profit, item_type_cd from default.t_carbn01  group by profit, item_type_cd""",
@@ -516,7 +516,7 @@ test("IUD-01-01-01_001-40", Include) {
 test("IUD-01-01-01_001-41", Include) {
   intercept[Exception] {
     sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
-    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+    sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
     sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
     sql(s"""update default.t_carbn01  set(item_type_cd)= (item_type_cd/1)""").collect
     sql(s"""NA""").collect
@@ -528,7 +528,7 @@ test("IUD-01-01-01_001-41", Include) {
 //Check update Carbon table using a / operation on a column value
 test("IUD-01-01-01_001-42", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set(profit)= (profit/1)""").collect
   checkAnswer(s"""select profit from default.t_carbn01  group by profit""",
@@ -540,7 +540,7 @@ test("IUD-01-01-01_001-42", Include) {
 //Check update Carbon table using a - operation on a column value
 test("IUD-01-01-01_001-43", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set(profit, item_type_cd)= (profit-1.2, item_type_cd-3)""").collect
   checkAnswer(s"""select profit, item_type_cd from default.t_carbn01  group by profit, item_type_cd""",
@@ -552,7 +552,7 @@ test("IUD-01-01-01_001-43", Include) {
 //Check update Carbon table using a + operation on a column value
 test("IUD-01-01-01_001-44", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set(profit, item_type_cd)= (profit+1.2, item_type_cd+qty_day_avg)""").collect
   checkAnswer(s"""select profit, item_type_cd from default.t_carbn01  where profit = 3.64 and item_type_cd = 4291""",
@@ -564,7 +564,7 @@ test("IUD-01-01-01_001-44", Include) {
 //Check update Carbon table using a + operation on a column value which is string
 test("IUD-01-01-01_001-45", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  set(item_code) = (item_code+1)""").collect
   checkAnswer(s"""select count(*) from t_carbn01""",
@@ -576,7 +576,7 @@ test("IUD-01-01-01_001-45", Include) {
 //Check for update Carbon table without where clause
 test("IUD-01-01-01_002-01", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""Update default.t_carbn01  set (active_status) = ('NO')""").collect
   checkAnswer(s"""select active_status from default.t_carbn01  group by active_status""",
@@ -588,7 +588,7 @@ test("IUD-01-01-01_002-01", Include) {
 //Check for update Carbon table with where clause
 test("IUD-01-01-01_002-02", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""Update default.t_carbn01  set (active_status) = ('NO') where active_status = 'TRUE' """).collect
   checkAnswer(s"""select active_status from default.t_carbn01  where active_status='NO' group by active_status""",
@@ -599,7 +599,7 @@ test("IUD-01-01-01_002-02", Include) {
 
 //Check for update Carbon table with where exists clause
 test("IUD-01-01-01_002-03", Include) {
-   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+   sql(s"""create table if not exists default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""Update default.t_carbn01  X set (active_status) = ('NO') where exists (select 1 from default.t_carbn01b Y where Y.item_code = X.item_code)""").collect
   checkAnswer(s"""select active_status from default.t_carbn01   group by active_status""",
@@ -611,7 +611,7 @@ test("IUD-01-01-01_002-03", Include) {
 //Check for delete Carbon table without where clause
 test("IUD-01-01-01_002-04", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""Delete from default.t_carbn01 """).collect
   checkAnswer(s"""select count(*) from default.t_carbn01 """,
@@ -623,7 +623,7 @@ test("IUD-01-01-01_002-04", Include) {
 //Check for delete Carbon table with where clause
 test("IUD-01-01-01_002-05", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""Delete from default.t_carbn01  where active_status = 'TRUE'""").collect
   checkAnswer(s"""select count(*) from default.t_carbn01  where active_status='TRUE'""",
@@ -635,7 +635,7 @@ test("IUD-01-01-01_002-05", Include) {
 //Check for delete Carbon table with where exists clause
 test("IUD-01-01-01_002-06", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""Delete from default.t_carbn01  X where exists (select 1 from default.t_carbn01b Y where Y.item_code = X.item_code)""").collect
   checkAnswer(s"""select count(*) from default.t_carbn01 """,
@@ -646,7 +646,7 @@ test("IUD-01-01-01_002-06", Include) {
 //Check for update Carbon table using query involving filters
 test("IUD-01-01-01_003-03", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
  sql(s"""update default.t_carbn01  a set ( a.item_type_cd, a.profit) = ( select b.item_type_cd, b.profit from default.t_carbn01b b where b.item_type_cd = 2)""").collect
   checkAnswer(s"""select item_type_cd, profit from default.t_carbn01  limit 1""",
@@ -658,7 +658,7 @@ test("IUD-01-01-01_003-03", Include) {
 //Check for update Carbon table using query involving sub query
 test("IUD-01-01-01_003-04", Include) {
    sql(s"""drop table IF EXISTS default.t_carbn01 """).collect
- sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED BY 'org.apache.carbondata.format'""").collect
+ sql(s"""create table default.t_carbn01 (Active_status String,Item_type_cd INT,Qty_day_avg INT,Qty_total INT,Sell_price BIGINT,Sell_pricep DOUBLE,Discount_price DOUBLE,Profit DECIMAL(3,2),Item_code String,Item_name String,Outlet_name String,Update_time TIMESTAMP,Create_date String)STORED AS carbondata""").collect
  sql(s"""insert into default.t_carbn01  select * from default.t_carbn01b""").collect
... 31330 lines suppressed ...
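
Every hunk above applies the same one-line substitution: the Hive-style STORED BY 'org.apache.carbondata.format' clause is replaced with STORED AS carbondata, the form the Carbon Spark extension parses. Below is a minimal sketch of a session that runs such DDL; the extension class name, builder settings, and table schema are assumptions for illustration, not part of this patch:

    // A minimal sketch, assuming the extension class is
    // org.apache.spark.sql.CarbonExtensions (illustrative, not from this patch).
    import org.apache.spark.sql.SparkSession

    object StoredAsCarbondataSketch {
      def main(args: Array[String]): Unit = {
        val spark = SparkSession.builder()
          .appName("StoredAsCarbondataSketch")
          .config("spark.sql.extensions", "org.apache.spark.sql.CarbonExtensions")
          .enableHiveSupport()
          .getOrCreate()

        // New-style DDL, matching the pattern used throughout the updated tests:
        spark.sql(
          """CREATE TABLE IF NOT EXISTS default.t_example (id INT, name STRING)
            |STORED AS carbondata""".stripMargin)

        // The data source form of the same DDL:
        // CREATE TABLE default.t_example2 (id INT, name STRING) USING carbondata

        spark.stop()
      }
    }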

