carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jbono...@apache.org
Subject [01/56] [abbrv] incubator-carbondata git commit: [Issue 618]Supported Spark 1.6 in Carbondata (#670)
Date Thu, 23 Jun 2016 14:15:49 GMT
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master [created] 63d3284e4


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ead0076b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
index b19b744..4edfb7a 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/allqueries/AllDataTypesTestCaseAggregate.scala
@@ -40,11 +40,14 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll
{
     CarbonProperties.getInstance().addProperty("carbon.direct.surrogate","false")
     sql("create cube Carbon_automation_test dimensions(imei string,deviceInformationId integer,MAC
string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize
string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode
string,internalModels string, deliveryTime string, channelsId string, channelsName string
, deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict
string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId
string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string,
ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion
string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string,
Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string,
Active_operatorsVersion string, Active_
 phonePADPartitionedVersions string, Latest_YEAR integer, Latest_MONTH integer, Latest_DAY
integer, Latest_HOUR string, Latest_areaId string, Latest_country string, Latest_province
string, Latest_city string, Latest_district string, Latest_street string, Latest_releaseId
string, Latest_EMUIVersion string, Latest_operaSysVersion string, Latest_BacVerNumber string,
Latest_BacFlashVer string, Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber
string, Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId
string, gamePointDescription string)  measures(gamePointId integer,contractNumber integer)
OPTIONS (PARTITIONER [CLASS = 'org.carbondata.spark.partition.api.impl.SampleDataPartitionerImpl'
,COLUMNS= (imei) , PARTITION_COUNT=2] )");
     sql("LOAD DATA FACT FROM'"+currentDirectory+"/src/test/resources/100_olap.csv' INTO Cube
Carbon_automation_test partitionData(DELIMITER ',' ,QUOTECHAR '\"', FILEHEADER 'imei,deviceInformationId,MAC,deviceColor,device_backColor,modelId,marketName,AMSize,ROMSize,CUPAudit,CPIClocked,series,productionDate,bomCode,internalModels,deliveryTime,channelsId,channelsName,deliveryAreaId,deliveryCountry,deliveryProvince,deliveryCity,deliveryDistrict,deliveryStreet,oxSingleNumber,contractNumber,ActiveCheckTime,ActiveAreaId,ActiveCountry,ActiveProvince,Activecity,ActiveDistrict,ActiveStreet,ActiveOperatorId,Active_releaseId,Active_EMUIVersion,Active_operaSysVersion,Active_BacVerNumber,Active_BacFlashVer,Active_webUIVersion,Active_webUITypeCarrVer,Active_webTypeDataVerNumber,Active_operatorsVersion,Active_phonePADPartitionedVersions,Latest_YEAR,Latest_MONTH,Latest_DAY,Latest_HOUR,Latest_areaId,Latest_country,Latest_province,Latest_city,Latest_district,Latest_street,Latest_releaseId,Latest_EMUIV
 ersion,Latest_operaSysVersion,Latest_BacVerNumber,Latest_BacFlashVer,Latest_webUIVersion,Latest_webUITypeCarrVer,Latest_webTypeDataVerNumber,Latest_operatorsVersion,Latest_phonePADPartitionedVersions,Latest_operatorId,gamePointId,gamePointDescription')");
+    sql("create table if not exists Carbon_automation_hive (imei string,deviceInformationId
int,MAC string,deviceColor string,device_backColor string,modelId string,marketName string,AMSize
string,ROMSize string,CUPAudit string,CPIClocked string,series string,productionDate timestamp,bomCode
string,internalModels string, deliveryTime string, channelsId string, channelsName string
, deliveryAreaId string, deliveryCountry string, deliveryProvince string, deliveryCity string,deliveryDistrict
string, deliveryStreet string, oxSingleNumber string, ActiveCheckTime string, ActiveAreaId
string, ActiveCountry string, ActiveProvince string, Activecity string, ActiveDistrict string,
ActiveStreet string, ActiveOperatorId string, Active_releaseId string, Active_EMUIVersion
string, Active_operaSysVersion string, Active_BacVerNumber string, Active_BacFlashVer string,
Active_webUIVersion string, Active_webUITypeCarrVer string,Active_webTypeDataVerNumber string,
Active_operatorsVersion string, Active
 _phonePADPartitionedVersions string, Latest_YEAR int, Latest_MONTH int, Latest_DAY int, Latest_HOUR
string, Latest_areaId string, Latest_country string, Latest_province string, Latest_city string,
Latest_district string, Latest_street string, Latest_releaseId string, Latest_EMUIVersion
string, Latest_operaSysVersion string, Latest_BacVerNumber string, Latest_BacFlashVer string,
Latest_webUIVersion string, Latest_webUITypeCarrVer string, Latest_webTypeDataVerNumber string,
Latest_operatorsVersion string, Latest_phonePADPartitionedVersions string, Latest_operatorId
string, gamePointDescription string, gamePointId int,contractNumber int) row format delimited
fields terminated by ','");
+    sql("LOAD DATA LOCAL INPATH '"+currentDirectory+"/src/test/resources/100_olap.csv' INTO
table Carbon_automation_hive ");
 
   }
 
   override def afterAll {
     sql("drop cube Carbon_automation_test")
+    sql("drop cube Carbon_automation_hive")
 
   }
 
@@ -414,7 +417,7 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll
{
   test("select variance(deviceInformationId) as a   from Carbon_automation_test")({
     checkAnswer(
       sql("select variance(deviceInformationId) as a   from Carbon_automation_test"),
-      Seq(Row(9.31041555963636E9)))
+      sql("select variance(deviceInformationId) as a   from Carbon_automation_hive"))
   })
    //TC_105
   test("select var_samp(deviceInformationId) as a  from Carbon_automation_test")({
@@ -455,7 +458,7 @@ class AllDataTypesTestCaseAggregate extends QueryTest with BeforeAndAfterAll
{
   test("select corr(deviceInformationId,deviceInformationId)  as a from Carbon_automation_test")({
     checkAnswer(
       sql("select corr(deviceInformationId,deviceInformationId)  as a from Carbon_automation_test"),
-      Seq(Row(1.0000000000000002)))
+      sql("select corr(deviceInformationId,deviceInformationId)  as a from Carbon_automation_hive"))
   })
 
   //TC_111

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ead0076b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionNoDictionaryTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionNoDictionaryTest.scala
b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionNoDictionaryTest.scala
index 061b265..4012493 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionNoDictionaryTest.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionNoDictionaryTest.scala
@@ -18,11 +18,22 @@ import org.scalatest.BeforeAndAfterAll
   */
 class DataCompactionNoDictionaryTest extends QueryTest with BeforeAndAfterAll {
 
+  // return segment details
+  def getSegments(databaseName : String, tableName : String, tableId : String): List[String]
= {
+    val segmentStatusManager: SegmentStatusManager = new SegmentStatusManager(new
+        AbsoluteTableIdentifier(
+          CarbonProperties.getInstance.getProperty(CarbonCommonConstants.STORE_LOCATION),
+          new CarbonTableIdentifier(databaseName, tableName.toLowerCase , tableId)
+        )
+    )
+    val segments = segmentStatusManager.getValidSegments().listOfValidSegments.asScala.toList
+    segments
+  }
+
   override def beforeAll {
-    sql("drop table if exists  noDictionaryCompaction")
 
     sql(
-      "CREATE TABLE IF NOT EXISTS noDictionaryCompaction (country String, ID Int, date Timestamp,
name " +
+      "CREATE TABLE nodictionaryCompaction (country String, ID Int, date Timestamp, name
" +
         "String, " +
         "phonetype String, serialname String, salary Int) STORED BY 'org.apache.carbondata"
+
         ".format' TBLPROPERTIES('DICTIONARY_EXCLUDE'='country')"
@@ -37,17 +48,17 @@ class DataCompactionNoDictionaryTest extends QueryTest with BeforeAndAfterAll
{
 
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/mm/dd")
-    sql("LOAD DATA fact from '" + csvFilePath1 + "' INTO CUBE noDictionaryCompaction PARTITIONDATA"
+
-      "(DELIMITER ',', QUOTECHAR '\"')"
+    sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE nodictionaryCompaction
" +
+        "OPTIONS('DELIMITER' = ',')"
     )
-    sql("LOAD DATA fact from '" + csvFilePath2 + "' INTO CUBE noDictionaryCompaction  PARTITIONDATA"
+
-      "(DELIMITER ',', QUOTECHAR '\"')"
+    sql("LOAD DATA LOCAL INPATH '" + csvFilePath2 + "' INTO TABLE nodictionaryCompaction
" +
+        "OPTIONS('DELIMITER' = ',')"
     )
-    sql("LOAD DATA fact from '" + csvFilePath3 + "' INTO CUBE noDictionaryCompaction  PARTITIONDATA"
+
-      "(DELIMITER ',', QUOTECHAR '\"')"
+    sql("LOAD DATA LOCAL INPATH '" + csvFilePath3 + "' INTO TABLE nodictionaryCompaction
" +
+        "OPTIONS('DELIMITER' = ',')"
     )
     // compaction will happen here.
-    sql("alter table noDictionaryCompaction compact 'major'"
+    sql("alter table nodictionaryCompaction compact 'major'"
     )
 
     // wait for compaction to finish.
@@ -62,13 +73,7 @@ class DataCompactionNoDictionaryTest extends QueryTest with BeforeAndAfterAll
{
     var noOfRetries = 0
     while (status && noOfRetries < 10) {
 
-      val segmentStatusManager: SegmentStatusManager = new SegmentStatusManager(new
-          AbsoluteTableIdentifier(
-            CarbonProperties.getInstance.getProperty(CarbonCommonConstants.STORE_LOCATION),
-            new CarbonTableIdentifier("default", "noDictionaryCompaction", "1")
-          )
-      )
-      val segments = segmentStatusManager.getValidSegments().listOfValidSegments.asScala.toList
+      val segments: List[String] = getSegments("default", "nodictionaryCompaction", "uni21")
 
       if (!segments.contains("0.1")) {
         // wait for 2 seconds for compaction to complete.
@@ -81,11 +86,10 @@ class DataCompactionNoDictionaryTest extends QueryTest with BeforeAndAfterAll
{
     }
   }
 
-
-  test("select country from noDictionaryCompaction") {
+  test("select country from nodictionaryCompaction") {
     // check answers after compaction.
     checkAnswer(
-      sql("select country from noDictionaryCompaction"),
+      sql("select country from nodictionaryCompaction"),
       Seq(Row("america"),
         Row("canada"),
         Row("chile"),
@@ -107,16 +111,10 @@ class DataCompactionNoDictionaryTest extends QueryTest with BeforeAndAfterAll
{
 
   test("delete merged folder and execute query") {
     // delete merged segments
-   sql("clean files for table noDictionaryCompaction")
+   sql("clean files for table nodictionaryCompaction")
 
-    val segmentStatusManager: SegmentStatusManager = new SegmentStatusManager(new
-        AbsoluteTableIdentifier(
-          CarbonProperties.getInstance.getProperty(CarbonCommonConstants.STORE_LOCATION),
-          new CarbonTableIdentifier("default", "noDictionaryCompaction", "1")
-        )
-    )
     // merged segment should not be there
-    val segments   = segmentStatusManager.getValidSegments.listOfValidSegments.asScala.toList
+    val segments = getSegments("default", "nodictionaryCompaction", "uni21")
     assert(!segments.contains("0"))
     assert(!segments.contains("1"))
     assert(!segments.contains("2"))
@@ -124,7 +122,7 @@ class DataCompactionNoDictionaryTest extends QueryTest with BeforeAndAfterAll
{
 
     // now check the answers it should be same.
     checkAnswer(
-      sql("select country from noDictionaryCompaction"),
+      sql("select country from nodictionaryCompaction"),
       Seq(Row("america"),
         Row("canada"),
         Row("chile"),
@@ -145,7 +143,7 @@ class DataCompactionNoDictionaryTest extends QueryTest with BeforeAndAfterAll
{
   }
 
   override def afterAll {
-    sql("drop cube noDictionaryCompaction")
+    sql("drop table nodictionaryCompaction")
     CarbonProperties.getInstance()
       .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
         CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ead0076b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
index 7fb72f5..5b473e2 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataretention/DataRetentionTestCase.scala
@@ -96,7 +96,7 @@ class DataRetentionTestCase extends QueryTest with BeforeAndAfterAll {
 
   test("RetentionTest_withoutDelete") {
     checkAnswer(
-      sql("SELECT country, count(salary) AS amount FROM DataRetentionTable WHERE country"
+
+      sql("SELECT country, count(salary) AS amount FROM dataretentionTable WHERE country"
+
           " IN ('china','ind','aus','eng') GROUP BY country"
       ),
       Seq(Row("aus", 9), Row("ind", 9))
@@ -114,7 +114,7 @@ class DataRetentionTestCase extends QueryTest with BeforeAndAfterAll {
     val actualValue: String = getSegmentStartTime(segments, 1)
     // delete segments (0,1) which contains ind, aus
     sql(
-      "DELETE SEGMENTS FROM TABLE DataRetentionTable where STARTTIME before '" + actualValue
+ "'")
+      "DELETE SEGMENTS FROM TABLE dataretentionTable where STARTTIME before '" + actualValue
+ "'")
 
     // load segment 2 which contains eng
     sql(
@@ -130,9 +130,9 @@ class DataRetentionTestCase extends QueryTest with BeforeAndAfterAll {
 
   test("RetentionTest3_DeleteByLoadId") {
     // delete segment 2 and load ind segment
-    sql("DELETE LOAD 2 FROM TABLE DataRetentionTable")
+    sql("DELETE LOAD 2 FROM TABLE dataretentionTable")
     sql(
-      "LOAD DATA LOCAL INPATH '" + resource + "dataretention1.csv' INTO TABLE DataRetentionTable
" +
+      "LOAD DATA LOCAL INPATH '" + resource + "dataretention1.csv' INTO TABLE dataretentionTable
" +
       "OPTIONS('DELIMITER' = ',')")
     checkAnswer(
       sql("SELECT country, count(salary) AS amount FROM DataRetentionTable WHERE country"
+
@@ -140,7 +140,10 @@ class DataRetentionTestCase extends QueryTest with BeforeAndAfterAll
{
       ),
       Seq(Row("ind", 9))
     )
-    sql("clean files for table DataRetentionTable")
+
+    // these queries should execute without any error.
+    sql("show segments for table dataretentionTable")
+    sql("clean files for table dataretentionTable")
   }
 
   test("RetentionTest4_DeleteByInvalidLoadId") {

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ead0076b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
index dc89a93..aeac733 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/deleteTable/TestDeleteTableNewDDL.scala
@@ -53,8 +53,31 @@ class TestDeleteTableNewDDL extends QueryTest with BeforeAndAfterAll {
 
   }
 
+  test("drop table using case insensitive table name") {
+    // create table
+    sql(
+      "CREATE table CaseInsensitiveTable (ID int, date String, country String, name " +
+      "String," +
+      "phonetype String, serialname String, salary int) stored by 'org.apache.carbondata.format'"
+
+      "TBLPROPERTIES('DICTIONARY_INCLUDE'='ID', 'DICTIONARY_INCLUDE'='salary')"
+    )
+    // table should drop without any error
+    sql("drop table caseInsensitiveTable")
+
+    // Now create same table, it should not give any error.
+    sql(
+      "CREATE table CaseInsensitiveTable (ID int, date String, country String, name " +
+      "String," +
+      "phonetype String, serialname String, salary int) stored by 'org.apache.carbondata.format'"
+
+      "TBLPROPERTIES('DICTIONARY_INCLUDE'='ID', 'DICTIONARY_INCLUDE'='salary')"
+    )
+
+  }
+
+
   override def afterAll: Unit = {
 
+    sql("drop table CaseInsensitiveTable")
   }
 
 }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ead0076b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
index 31b36d1..506c0fe 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
@@ -97,6 +97,12 @@ class TimestampDataTypeDirectDictionaryTest extends QueryTest with BeforeAndAfte
 
   }
 
+  test("select count(doj) from directDictionaryCube") {
+    checkAnswer(
+      sql("select count(doj) from directDictionaryCube"),
+      Seq(Row(2))
+    )
+  }
 
   override def afterAll {
     sql("drop cube directDictionaryCube")

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ead0076b/integration/spark/src/test/scala/org/carbondata/spark/util/AutoHighCardinalityIdentifyTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/util/AutoHighCardinalityIdentifyTestCase.scala
b/integration/spark/src/test/scala/org/carbondata/spark/util/AutoHighCardinalityIdentifyTestCase.scala
index fd8bb7c..ef59484 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/util/AutoHighCardinalityIdentifyTestCase.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/util/AutoHighCardinalityIdentifyTestCase.scala
@@ -108,7 +108,7 @@ class AutoHighCardinalityIdentifyTestCase extends QueryTest with BeforeAndAfterA
 
   def relation: CarbonRelation = {
     CarbonEnv.getInstance(CarbonHiveContext).carbonCatalog
-        .lookupRelation1(Option("default"), "highcard", None)(CarbonHiveContext)
+        .lookupRelation1(Option("default"), "highcard")(CarbonHiveContext)
         .asInstanceOf[CarbonRelation]
   }
   

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ead0076b/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilTestCase.scala
b/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilTestCase.scala
index 916d1f4..78a0eca 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilTestCase.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilTestCase.scala
@@ -145,16 +145,16 @@ class GlobalDictionaryUtilTestCase extends QueryTest with BeforeAndAfterAll
{
 
   def buildRelation() = {
     val catalog = CarbonEnv.getInstance(CarbonHiveContext).carbonCatalog
-    sampleRelation = catalog.lookupRelation1(Option("default"), "sample", None)(CarbonHiveContext)
+    sampleRelation = catalog.lookupRelation1(Option("default"), "sample")(CarbonHiveContext)
       .asInstanceOf[CarbonRelation]
     dimSampleRelation = catalog
-      .lookupRelation1(Option("default"), "dimSample", None)(CarbonHiveContext)
+      .lookupRelation1(Option("default"), "dimSample")(CarbonHiveContext)
       .asInstanceOf[CarbonRelation]
     complexRelation = catalog
-      .lookupRelation1(Option("default"), "complextypes", None)(CarbonHiveContext)
+      .lookupRelation1(Option("default"), "complextypes")(CarbonHiveContext)
       .asInstanceOf[CarbonRelation]
     incrementalLoadTableRelation = catalog
-      .lookupRelation1(Option("default"), "incrementalLoadTable", None)(CarbonHiveContext)
+      .lookupRelation1(Option("default"), "incrementalLoadTable")(CarbonHiveContext)
       .asInstanceOf[CarbonRelation]
   }
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ead0076b/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 80d8c97..5df017a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -25,7 +25,7 @@
   <name>carbondata</name>
   <properties>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    <spark.version>1.5.2</spark.version>
+    <spark.version>1.6.1</spark.version>
     <scala.binary.version>2.10</scala.binary.version>
     <snappy.version>1.1.1.7</snappy.version>
     <hadoop.version>2.2.0</hadoop.version>

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ead0076b/processing/pom.xml
----------------------------------------------------------------------
diff --git a/processing/pom.xml b/processing/pom.xml
index a8b1e6e..5cef13d 100644
--- a/processing/pom.xml
+++ b/processing/pom.xml
@@ -65,17 +65,6 @@
       <version>${kettle.version}</version>
     </dependency>
     <dependency>
-      <groupId>eigenbase</groupId>
-      <artifactId>eigenbase-xom</artifactId>
-      <version>1.3.4</version>
-      <exclusions>
-        <exclusion>
-          <groupId>*</groupId>
-          <artifactId>*</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
-    <dependency>
       <groupId>commons-vfs</groupId>
       <artifactId>commons-vfs</artifactId>
       <version>1.0</version>

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/ead0076b/processing/src/main/java/org/carbondata/processing/store/CarbonDataFileAttributes.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/carbondata/processing/store/CarbonDataFileAttributes.java
b/processing/src/main/java/org/carbondata/processing/store/CarbonDataFileAttributes.java
index b17ed8d..e4f72b6 100644
--- a/processing/src/main/java/org/carbondata/processing/store/CarbonDataFileAttributes.java
+++ b/processing/src/main/java/org/carbondata/processing/store/CarbonDataFileAttributes.java
@@ -25,8 +25,6 @@ import org.carbondata.common.logging.LogService;
 import org.carbondata.common.logging.LogServiceFactory;
 import org.carbondata.core.constants.CarbonCommonConstants;
 
-import org.apache.spark.sql.columnar.TIMESTAMP;
-
 /**
  * This class contains attributes of file which are required to
  * construct file name like taskId, factTimeStamp
@@ -84,7 +82,7 @@ public class CarbonDataFileAttributes {
       dateToStr = parser.parse(factTimeStamp);
       return Long.toString(dateToStr.getTime());
     } catch (ParseException e) {
-      LOGGER.error("Cannot convert" + TIMESTAMP.toString()
+      LOGGER.error("Cannot convert" + factTimeStamp
           + " to Time/Long type value" + e.getMessage());
       return null;
     }


Mime
View raw message