carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jack...@apache.org
Subject [01/38] incubator-carbondata git commit: reuse test case for integration module
Date Sat, 07 Jan 2017 16:36:35 GMT
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master b0750c192 -> 49727a273


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
b/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
deleted file mode 100644
index adb7a1c..0000000
--- a/integration/spark2/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableTestCase.scala
+++ /dev/null
@@ -1,162 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.carbondata.spark.testsuite.allqueries
-
-import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.common.util.QueryTest
-import org.scalatest.BeforeAndAfterAll
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-
-class InsertIntoCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
-  var timeStampPropOrig: String = _
-  override def beforeAll {
-    dropTableIfExists
-    timeStampPropOrig = CarbonProperties.getInstance().getProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT)
-    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
-    sql("create table THive (imei string,deviceInformationId int,MAC string,deviceColor string,device_backColor
string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit string,CPIClocked
string,series string,productionDate timestamp,bomCode string,internalModels string, deliveryTime
string, channelsId string, channelsName string , deliveryAreaId string, deliveryCountry string,
deliveryProvince string, deliveryCity string,deliveryDistrict string, deliveryStreet string,
oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string, ActiveCountry string,
ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet string, ActiveOperatorId
string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion string,
Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string, Active_webUITypeCarrVer
string,Active_webTypeDataVerNumber string, Active_operatorsVersion string, Active_phonePADPartitionedVersions
st
 ring, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string, Latest_areaId
string, Latest_country string, Latest_province string, Latest_city string, Latest_district
string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion
string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string,
Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion
string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription
string,gamePointId double,contractNumber BigInt) ROW FORMAT DELIMITED FIELDS TERMINATED BY
','")
-    sql("LOAD DATA local INPATH '../spark/src/test/resources/100_olap.csv' INTO TABLE THive")
-  }
-  test("insert from hive") {
-    sql("create table TCarbon1 (imei string,deviceInformationId int,MAC string,deviceColor
string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit
string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels
string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string,
deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string,
deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string,
ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet
string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion
string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string,
Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion
string, Active_phonePADPartitionedVersions
  string, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string,
Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district
string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion
string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string,
Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion
string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription
string,gamePointId double,contractNumber BigInt) USING org.apache.spark.sql.CarbonSource OPTIONS('dbName'='default','tableName'='TCarbon1')")
-    sql("insert into TCarbon1 select * from THive")
-    checkAnswer(
-        sql("select imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_opera
 torsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription
from THive order by imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,Lates
 t_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription"),
-        sql("select imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_opera
 torsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription
from TCarbon1 order by imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,La
 test_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription")
-    )
-  }
-  test("insert from hive-sum expression") {
-    sql("create table TCarbon2 (MAC string,deviceInformationIdSum int) USING org.apache.spark.sql.CarbonSource
OPTIONS('dbName'='default','tableName'='TCarbon2')")
-    sql("insert into TCarbon2 select MAC,sum(deviceInformationId+ 10) as a from THive group
by MAC")
-    checkAnswer(
-         sql("select MAC,deviceInformationIdSum from TCarbon2 order by MAC"),
-         sql("select MAC,sum(deviceInformationId+ 10) as a from THive group by MAC order
by MAC")
-     )
-  }
-  test("insert from carbon-select columns") {
-    sql("create table TCarbon3 (imei string,deviceInformationId int,MAC string,deviceColor
string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit
string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels
string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string,
deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string,
deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string,
ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet
string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion
string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string,
Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion
string, Active_phonePADPartitionedVersions
  string, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string,
Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district
string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion
string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string,
Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion
string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription
string,gamePointId double,contractNumber BigInt) USING org.apache.spark.sql.CarbonSource OPTIONS('dbName'='default','tableName'='TCarbon3')")
-    sql("insert into TCarbon3 select * from TCarbon1")
-    checkAnswer(
-        sql("select imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_opera
 torsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription
from TCarbon1 order by imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,La
 test_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription"),
-        sql("select imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_opera
 torsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription
from TCarbon3 order by imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,La
 test_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription")
-    )
-  }
-  test("insert from carbon-select columns-source table has more column then target column")
{
-    sql("create table TCarbon10 (imei string,deviceInformationId int,MAC string,deviceColor
string,device_backColor string) USING org.apache.spark.sql.CarbonSource OPTIONS('dbName'='default','tableName'='TCarbon10')")
-    try{
-      sql("insert into TCarbon10 select * from TCarbon1")
-      assert(false)
-    } catch {
-      case ex: AnalysisException =>
-        if (ex.getMessage().contains("the number of columns are different")) {
-          assert(true)
-        } else {
-          assert(false)
-        }
-      case _ => assert(false)
-    }
-  }
-  test("insert from carbon-select * columns") {
-    sql("create table TCarbon4 (imei string,deviceInformationId int,MAC string,deviceColor
string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit
string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels
string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string,
deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string,
deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string,
ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet
string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion
string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string,
Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion
string, Active_phonePADPartitionedVersions
  string, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string,
Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district
string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion
string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string,
Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion
string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription
string,gamePointId double,contractNumber BigInt) USING org.apache.spark.sql.CarbonSource OPTIONS('dbName'='default','tableName'='TCarbon4')")
-    sql("insert into TCarbon4 select * from TCarbon1")
-    checkAnswer(
-        sql("select * from TCarbon1"),
-        sql("select * from TCarbon4")
-    )
-  }
-  test("insert->hive column more than carbon column->success") {
-    sql("create table TCarbon5 (imei string,deviceInformationId int,MAC string,deviceColor
string,gamePointId double,contractNumber BigInt) USING org.apache.spark.sql.CarbonSource OPTIONS('dbName'='default','tableName'='TCarbon5')")
-    try {
-      sql("insert into TCarbon5 select imei,deviceInformationId,MAC,deviceColor,gamePointId,contractNumber,device_backColor,modelId,CUPAudit,CPIClocked
from THive")
-      assert(false)
-    } catch {
-      case ex: AnalysisException =>
-        if (ex.getMessage().contains("the number of columns are different")) {
-          assert(true)
-        } else {
-          assert(false)
-        }
-      case _ => assert(false)
-    }
-
-  }
-  test("insert->carbon column is more then hive-fails") {
-    sql("create table TCarbon6 (imei string,deviceInformationId int,MAC string,deviceColor
string,gamePointId double,contractNumber BigInt) USING org.apache.spark.sql.CarbonSource OPTIONS('dbName'='default','tableName'='TCarbon6')")
-    try {
-      sql("insert into TCarbon6 select imei,deviceInformationId,MAC,deviceColor,gamePointId
from THive")
-      assert(false)
-    } catch  {
-      case ex: Exception => assert(true)
-    }
-  }
-  test("insert->insert wrong data types-pass") {
-    sql("create table TCarbon7 (imei string,deviceInformationId int,MAC string) USING org.apache.spark.sql.CarbonSource
OPTIONS('dbName'='default','tableName'='TCarbon7')")
-    sql("insert into TCarbon7 select imei,MAC,deviceInformationId from THive")
-    sql("create table THive7 (imei string,deviceInformationId int,MAC string) ROW FORMAT
DELIMITED FIELDS TERMINATED BY ','")
-    sql("insert into THive7 select imei,MAC,deviceInformationId from THive")
-    checkAnswer(
-        sql("select imei,deviceInformationId,MAC from TCarbon7"),
-        sql("select imei,deviceInformationId,MAC from THive7")
-    )
-  }
-  test("insert->insert empty data -pass") {
-    sql("create table TCarbon8 (imei string,deviceInformationId int,MAC string) USING org.apache.spark.sql.CarbonSource
OPTIONS('dbName'='default','tableName'='TCarbon8')")
-    sql("insert into TCarbon8 select imei,deviceInformationId,MAC from THive where MAC='wrongdata'")
-    checkAnswer(
-        sql("select imei,deviceInformationId,MAC from THive where MAC='wrongdata'"),
-        sql("select imei,deviceInformationId,MAC from TCarbon8 where MAC='wrongdata'")
-    )
-  }
-  test("insert into existing load-pass") {
-    sql("create table TCarbon9 (imei string,deviceInformationId int,MAC string,deviceColor
string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit
string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels
string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string,
deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string,
deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string,
ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet
string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion
string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string,
Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion
string, Active_phonePADPartitionedVersions
  string, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string,
Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district
string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion
string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string,
Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion
string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription
string,gamePointId double,contractNumber BigInt) USING org.apache.spark.sql.CarbonSource OPTIONS('dbName'='default','tableName'='TCarbon9')")
-    sql("create table THive9 (imei string,deviceInformationId int,MAC string,deviceColor
string,device_backColor string,modelId string,marketName string,AMSize string,ROMSize string,CUPAudit
string,CPIClocked string,series string,productionDate timestamp,bomCode string,internalModels
string, deliveryTime string, channelsId string, channelsName string , deliveryAreaId string,
deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict string,
deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId string,
ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string, ActiveStreet
string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion string, Active_operaSysVersion
string, Active_BacVerNumber string, Active_BacFlashVer string, Active_webUIVersion string,
Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string, Active_operatorsVersion
string, Active_phonePADPartitionedVersions s
 tring, Latest_YEAR int, Latest_MONTH int, Latest_DAY Decimal(30,10), Latest_HOUR string,
Latest_areaId string, Latest_country string, Latest_province string, Latest_city string, Latest_district
string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion string, Latest_operaSysVersion
string, Latest_BacVerNumber string, Latest_BacFlashVer string, Latest_webUIVersion string,
Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string, Latest_operatorsVersion
string, Latest_phonePADPartitionedVersions string, Latest_operatorId string, gamePointDescription
string,gamePointId double,contractNumber BigInt) ROW FORMAT DELIMITED FIELDS TERMINATED BY
','")
-    sql("insert into TCarbon9 select * from THive")
-    sql("insert into TCarbon9 select * from THive")
-    sql("insert into THive9 select * from THive")
-    sql("insert into THive9 select * from THive")
-    checkAnswer(
-        sql("select imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_opera
 torsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription
from THive9 order by imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,Late
 st_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription"),
-        sql("select imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_opera
 torsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription
from TCarbon9 order by imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIVersion,Latest_operaSysVersion,Latest_BacVerNumber,La
 test_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription")
-    )
-  }
-  override def afterAll {
-    dropTableIfExists
-  }
-
-  def dropTableIfExists: Unit = {
-    sql("DROP TABLE IF EXISTS THive")
-    sql("drop table if exists TCarbonSource3")
-    sql("drop table if exists TCarbonSource4")
-    sql("drop table if exists load")
-    sql("drop table if exists inser")
-    sql("drop table if exists TCarbon1")
-    sql("drop table if exists TCarbon2")
-    sql("drop table if exists TCarbon3")
-    sql("drop table if exists TCarbon4")
-    sql("drop table if exists TCarbon5")
-    sql("drop table if exists TCarbon6")
-    sql("drop table if exists TCarbon7")
-    sql("drop table if exists TCarbon8")
-    sql("drop table if exists TCarbon9")
-    if (timeStampPropOrig != null) {
-      CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
timeStampPropOrig)
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark2/src/test/scala/org/apache/spark/carbondata/CarbonDataSourceSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/CarbonDataSourceSuite.scala
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/CarbonDataSourceSuite.scala
index 3ba9f6a..97a180b 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/CarbonDataSourceSuite.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/CarbonDataSourceSuite.scala
@@ -22,13 +22,12 @@ import org.scalatest.BeforeAndAfterAll
 
 class CarbonDataSourceSuite extends QueryTest with BeforeAndAfterAll {
   override def beforeAll(): Unit = {
-    clean
     // Drop table
-    spark.sql("DROP TABLE IF EXISTS carbon_testtable")
-    spark.sql("DROP TABLE IF EXISTS csv_table")
+    sql("DROP TABLE IF EXISTS carbon_testtable")
+    sql("DROP TABLE IF EXISTS csv_table")
 
     // Create table
-    spark.sql(
+    sql(
       s"""
          | CREATE TABLE carbon_testtable(
          |    shortField short,
@@ -41,7 +40,7 @@ class CarbonDataSourceSuite extends QueryTest with BeforeAndAfterAll {
          | USING org.apache.spark.sql.CarbonSource
        """.stripMargin)
 
-    spark.sql(
+    sql(
       s"""
          | CREATE TABLE csv_table
          | (  shortField short,
@@ -56,19 +55,19 @@ class CarbonDataSourceSuite extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll(): Unit = {
-    spark.sql("drop table carbon_testtable")
-    spark.sql("DROP TABLE IF EXISTS csv_table")
+    sql("drop table carbon_testtable")
+    sql("DROP TABLE IF EXISTS csv_table")
   }
 
   test("project") {
-    spark.sql("select * from carbon_testtable").collect()
+    sql("select * from carbon_testtable").collect()
   }
 
   test("agg") {
-    spark.sql("select stringField, sum(intField) , sum(decimalField) " +
+    sql("select stringField, sum(intField) , sum(decimalField) " +
       "from carbon_testtable group by stringField").collect()
 
-    spark.sql(
+    sql(
       s"""
          | INSERT INTO TABLE carbon_testtable
          | SELECT shortField, intField, bigintField, doubleField, stringField,

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark2/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
index 5b69c9c..b3bab49 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/bucketing/TableBucketingTestCase.scala
@@ -19,9 +19,6 @@
 
 package org.apache.spark.carbondata.bucketing
 
-import java.io.File
-
-import org.apache.commons.io.FileUtils
 import org.apache.spark.sql.common.util.QueryTest
 import org.apache.spark.sql.execution.command.LoadTable
 import org.apache.spark.sql.execution.exchange.ShuffleExchange
@@ -38,17 +35,17 @@ class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll
{
 
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
-    spark.conf.set("spark.sql.autoBroadcastJoinThreshold", "-1")
-    spark.sql("DROP TABLE IF EXISTS t3")
-    spark.sql("DROP TABLE IF EXISTS t4")
-    spark.sql("DROP TABLE IF EXISTS t5")
-    spark.sql("DROP TABLE IF EXISTS t6")
-    spark.sql("DROP TABLE IF EXISTS t7")
-    spark.sql("DROP TABLE IF EXISTS t8")
+    sqlContext.setConf("spark.sql.autoBroadcastJoinThreshold", "-1")
+    sql("DROP TABLE IF EXISTS t3")
+    sql("DROP TABLE IF EXISTS t4")
+    sql("DROP TABLE IF EXISTS t5")
+    sql("DROP TABLE IF EXISTS t6")
+    sql("DROP TABLE IF EXISTS t7")
+    sql("DROP TABLE IF EXISTS t8")
   }
 
   test("test create table with buckets") {
-    spark.sql(
+    sql(
       """
            CREATE TABLE t4
            (ID Int, date Timestamp, country String,
@@ -56,8 +53,8 @@ class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll {
            USING org.apache.spark.sql.CarbonSource
            OPTIONS("bucketnumber"="4", "bucketcolumns"="name", "tableName"="t4")
       """)
-    LoadTable(Some("default"), "t4", "./src/test/resources/dataDiff.csv", Nil,
-      Map(("use_kettle", "false"))).run(spark)
+    LoadTable(Some("default"), "t4", s"$resourcesPath/dataDiff.csv", Nil,
+      Map(("use_kettle", "false"))).run(sqlContext.sparkSession)
     val table: CarbonTable = CarbonMetadata.getInstance().getCarbonTable("default_t4")
     if (table != null && table.getBucketingInfo("t4") != null) {
       assert(true)
@@ -67,7 +64,7 @@ class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
   test("test create table with no bucket join of carbon tables") {
-    spark.sql(
+    sql(
       """
            CREATE TABLE t5
            (ID Int, date Timestamp, country String,
@@ -75,10 +72,10 @@ class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll
{
            USING org.apache.spark.sql.CarbonSource
            OPTIONS("tableName"="t5")
       """)
-    LoadTable(Some("default"), "t5", "./src/test/resources/dataDiff.csv", Nil,
-      Map(("use_kettle", "false"))).run(spark)
+    LoadTable(Some("default"), "t5", s"$resourcesPath/dataDiff.csv", Nil,
+      Map(("use_kettle", "false"))).run(sqlContext.sparkSession)
 
-    val plan = spark.sql(
+    val plan = sql(
       """
         |select t1.*, t2.*
         |from t5 t1, t5 t2
@@ -92,7 +89,7 @@ class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
   test("test create table with bucket join of carbon tables") {
-    spark.sql(
+    sql(
       """
            CREATE TABLE t6
            (ID Int, date Timestamp, country String,
@@ -100,10 +97,10 @@ class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll
{
            USING org.apache.spark.sql.CarbonSource
            OPTIONS("bucketnumber"="4", "bucketcolumns"="name", "tableName"="t6")
       """)
-    LoadTable(Some("default"), "t6", "./src/test/resources/dataDiff.csv", Nil,
-      Map(("use_kettle", "false"))).run(spark)
+    LoadTable(Some("default"), "t6", s"$resourcesPath/dataDiff.csv", Nil,
+      Map(("use_kettle", "false"))).run(sqlContext.sparkSession)
 
-    val plan = spark.sql(
+    val plan = sql(
       """
         |select t1.*, t2.*
         |from t6 t1, t6 t2
@@ -117,7 +114,7 @@ class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll
{
   }
 
   test("test create table with bucket join of carbon table and parquet table") {
-    spark.sql(
+    sql(
       """
            CREATE TABLE t7
            (ID Int, date Timestamp, country String,
@@ -125,16 +122,16 @@ class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll
{
            USING org.apache.spark.sql.CarbonSource
            OPTIONS("bucketnumber"="4", "bucketcolumns"="name", "tableName"="t7")
       """)
-    LoadTable(Some("default"), "t7", "./src/test/resources/dataDiff.csv", Nil,
-      Map(("use_kettle", "false"))).run(spark)
+    LoadTable(Some("default"), "t7", s"$resourcesPath/dataDiff.csv", Nil,
+      Map(("use_kettle", "false"))).run(sqlContext.sparkSession)
 
-    spark.sql("DROP TABLE IF EXISTS bucketed_parquet_table")
-    spark.sql("select * from t7").write
+    sql("DROP TABLE IF EXISTS bucketed_parquet_table")
+    sql("select * from t7").write
       .format("parquet")
       .bucketBy(4, "name")
       .saveAsTable("bucketed_parquet_table")
 
-    val plan = spark.sql(
+    val plan = sql(
       """
         |select t1.*, t2.*
         |from t7 t1, bucketed_parquet_table t2
@@ -148,7 +145,7 @@ class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll
{
   }
 
   test("test create table with bucket join of carbon table and non bucket parquet table") {
-    spark.sql(
+    sql(
       """
            CREATE TABLE t8
            (ID Int, date Timestamp, country String,
@@ -156,15 +153,15 @@ class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll
{
            USING org.apache.spark.sql.CarbonSource
            OPTIONS("bucketnumber"="4", "bucketcolumns"="name", "tableName"="t8")
       """)
-    LoadTable(Some("default"), "t8", "./src/test/resources/dataDiff.csv", Nil,
-      Map(("use_kettle", "false"))).run(spark)
+    LoadTable(Some("default"), "t8", s"$resourcesPath/dataDiff.csv", Nil,
+      Map(("use_kettle", "false"))).run(sqlContext.sparkSession)
 
-    spark.sql("DROP TABLE IF EXISTS parquet_table")
-    spark.sql("select * from t8").write
+    sql("DROP TABLE IF EXISTS parquet_table")
+    sql("select * from t8").write
       .format("parquet")
       .saveAsTable("parquet_table")
 
-    val plan = spark.sql(
+    val plan = sql(
       """
         |select t1.*, t2.*
         |from t8 t1, parquet_table t2
@@ -178,11 +175,11 @@ class TableBucketingTestCase extends QueryTest with BeforeAndAfterAll
{
   }
 
   override def afterAll {
-    spark.sql("DROP TABLE IF EXISTS t3")
-    spark.sql("DROP TABLE IF EXISTS t4")
-    spark.sql("DROP TABLE IF EXISTS t5")
-    spark.sql("DROP TABLE IF EXISTS t6")
-    spark.sql("DROP TABLE IF EXISTS t7")
-    spark.sql("DROP TABLE IF EXISTS t8")
+    sql("DROP TABLE IF EXISTS t3")
+    sql("DROP TABLE IF EXISTS t4")
+    sql("DROP TABLE IF EXISTS t5")
+    sql("DROP TABLE IF EXISTS t6")
+    sql("DROP TABLE IF EXISTS t7")
+    sql("DROP TABLE IF EXISTS t8")
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark2/src/test/scala/org/apache/spark/carbondata/util/QueryTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/util/QueryTest.scala
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/util/QueryTest.scala
deleted file mode 100644
index e69de29..0000000

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark2/src/test/scala/org/apache/spark/carbondata/vectorreader/VectorReaderTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/vectorreader/VectorReaderTestCase.scala
b/integration/spark2/src/test/scala/org/apache/spark/carbondata/vectorreader/VectorReaderTestCase.scala
index 6cce86a..491a4ff 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/vectorreader/VectorReaderTestCase.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/vectorreader/VectorReaderTestCase.scala
@@ -30,13 +30,12 @@ class VectorReaderTestCase extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll {
 
-    clean
-    spark.sql("DROP TABLE IF EXISTS vectorreader")
+    sql("DROP TABLE IF EXISTS vectorreader")
     // clean data folder
 
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
-    spark.sql(
+    sql(
     """
            CREATE TABLE default.vectorreader
            (ID Int, date Timestamp, country String,
@@ -46,8 +45,8 @@ class VectorReaderTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
   test("test vector reader") {
-    spark.conf.set("carbon.enable.vector.reader", "true")
-    val plan = spark.sql(
+    sqlContext.setConf("carbon.enable.vector.reader", "true")
+    val plan = sql(
       """select * from vectorreader""".stripMargin).queryExecution.executedPlan
     var batchReader = false
     plan.collect {
@@ -57,8 +56,8 @@ class VectorReaderTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
   test("test without vector reader") {
-    spark.conf.set("carbon.enable.vector.reader", "false")
-    val plan = spark.sql(
+    sqlContext.setConf("carbon.enable.vector.reader", "false")
+    val plan = sql(
       """select * from vectorreader""".stripMargin).queryExecution.executedPlan
     var rowReader = false
     plan.collect {
@@ -68,8 +67,6 @@ class VectorReaderTestCase extends QueryTest with BeforeAndAfterAll {
   }
 
   override def afterAll {
-    spark.sql("DROP TABLE IF EXISTS vectorreader")
-    // clean data folder
-    clean
+    sql("DROP TABLE IF EXISTS vectorreader")
   }
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
b/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
index ac96070..93d1282 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/sql/common/util/QueryTest.scala
@@ -17,65 +17,31 @@
 
 package org.apache.spark.sql.common.util
 
-import java.io.File
 import java.util.{Locale, TimeZone}
 
 import scala.collection.JavaConversions._
 
-import org.apache.commons.io.FileUtils
 import org.apache.spark.sql.catalyst.plans._
 import org.apache.spark.sql.catalyst.util._
-import org.apache.spark.sql.{DataFrame, Row, SQLContext, SparkSession}
+import org.apache.spark.sql.test.TestQueryExecutor
+import org.apache.spark.sql.{DataFrame, Row}
 
 import org.apache.carbondata.common.logging.LogServiceFactory
-import org.apache.carbondata.core.util.CarbonProperties
 
 class QueryTest extends PlanTest {
 
-  val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+  private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
 
   // Timezone is fixed to America/Los_Angeles for those timezone sensitive tests (timestamp_*)
   TimeZone.setDefault(TimeZone.getTimeZone("America/Los_Angeles"))
   // Add Locale setting
   Locale.setDefault(Locale.US)
 
+  val sqlContext = TestQueryExecutor.INSTANCE.sqlContext
 
-  val rootPath = new File(this.getClass.getResource("/").getPath + "../../../..").getCanonicalPath
-  val storeLocation = s"$rootPath/integration/spark2/target/store"
-  val warehouse = s"$rootPath/integration/spark2/target/warehouse"
-  val metastoredb = s"$rootPath/integration/spark2/target/metastore_db"
+  val resourcesPath = TestQueryExecutor.resourcesPath
 
-  val spark = {
-
-    CarbonProperties.getInstance()
-      .addProperty("carbon.kettle.home", s"$rootPath/processing/carbonplugins")
-      .addProperty("carbon.storelocation", storeLocation)
-
-    import org.apache.spark.sql.CarbonSession._
-    val spark = SparkSession
-        .builder()
-        .master("local")
-        .appName("CarbonExample")
-        .enableHiveSupport()
-        .config("spark.sql.warehouse.dir", warehouse)
-        .config("javax.jdo.option.ConnectionURL",
-          s"jdbc:derby:;databaseName=$metastoredb;create=true")
-        .getOrCreateCarbonSession()
-
-    spark.sparkContext.setLogLevel("WARN")
-    spark
-  }
-
-  val Dsc = spark.sparkContext
-
-  lazy val implicits = spark.implicits
-
-  def sql(sqlText: String): DataFrame  = spark.sql(sqlText)
-
-  def clean(): Unit = {
-    val clean = (path: String) => FileUtils.deleteDirectory(new File(path))
-    clean(storeLocation)
-  }
+  def sql(sqlText: String): DataFrame  = TestQueryExecutor.INSTANCE.sql(sqlText)
 
   /**
    * Runs the plan and makes sure the answer contains all of the keywords, or the
@@ -96,9 +62,9 @@ class QueryTest extends PlanTest {
     }
   }
 
-  def sqlTest(sqlString: String, expectedAnswer: Seq[Row])(implicit sqlContext: SQLContext)
{
+  def sqlTest(sqlString: String, expectedAnswer: Seq[Row]) {
     test(sqlString) {
-      checkAnswer(sqlContext.sql(sqlString), expectedAnswer)
+      checkAnswer(sql(sqlString), expectedAnswer)
     }
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
b/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
index f80b12d..8b86316 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/util/CarbonCommandSuite.scala
@@ -22,6 +22,7 @@ import java.sql.Timestamp
 import java.util.Date
 
 import org.apache.spark.sql.common.util.QueryTest
+import org.apache.spark.sql.test.TestQueryExecutor
 import org.scalatest.BeforeAndAfterAll
 
 import org.apache.carbondata.api.CarbonStore
@@ -30,10 +31,8 @@ import org.apache.carbondata.core.util.CarbonUtil
 class CarbonCommandSuite extends QueryTest with BeforeAndAfterAll {
 
   override def beforeAll(): Unit = {
-    val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../../spark2")
-        .getCanonicalPath
     dropTable("csv_table")
-    createAndLoadInputTable("csv_table", s"$currentDirectory/src/test/resources/data_alltypes.csv")
+    createAndLoadInputTable("csv_table", s"$resourcesPath/data_alltypes.csv")
     createAndLoadTestTable("carbon_table", "csv_table")
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/af2f204e/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index c1bfcbe..cb23ef7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -97,6 +97,7 @@
     <module>processing</module>
     <module>hadoop</module>
     <module>integration/spark-common</module>
+    <module>integration/spark-common-test</module>
     <module>assembly</module>
   </modules>
 


Mime
View raw message