carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ravipes...@apache.org
Subject [16/47] incubator-carbondata git commit: clean scala code style (#703)
Date Mon, 01 Aug 2016 10:05:14 GMT
clean scala code style (#703)

clean scala code style (#703)

Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/90893247
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/90893247
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/90893247

Branch: refs/heads/master
Commit: 90893247f3a07df2d5b5435014445f3945ff63e4
Parents: 9f269d1
Author: Gin-zhj <zhujin2@huawei.com>
Authored: Mon Jul 25 11:59:31 2016 +0800
Committer: david <qiangcai@qq.com>
Committed: Mon Jul 25 11:59:31 2016 +0800

----------------------------------------------------------------------
 .../spark/sql/common/util/CsvCompare.scala      |  1 -
 .../org/apache/spark/sql/CarbonSqlParser.scala  |  2 +-
 .../spark/sql/SparkUnknownExpression.scala      |  4 +-
 .../execution/command/carbonTableSchema.scala   | 20 +++---
 .../spark/rdd/CarbonDataLoadRDD.scala           |  2 +-
 .../spark/rdd/CarbonDataRDDFactory.scala        |  2 +-
 .../apache/spark/sql/TestCarbonSqlParser.scala  | 72 ++++++++++----------
 .../DataCompactionCardinalityBoundryTest.scala  |  6 +-
 .../datacompaction/DataCompactionTest.scala     |  6 +-
 .../dataload/TestLoadDataWithHiveSyntax.scala   |  4 +-
 ...estampDataTypeDirectDictionaryTestCase.scala |  3 +-
 ...TypeDirectDictionaryWithNoDictTestCase.scala |  3 +-
 .../TimestampDataTypeNullDataTest.scala         |  2 +-
 .../util/GlobalDictionaryUtilTestCase.scala     |  4 +-
 14 files changed, 64 insertions(+), 67 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/90893247/integration-testcases/src/test/scala/org/apache/spark/sql/common/util/CsvCompare.scala
----------------------------------------------------------------------
diff --git a/integration-testcases/src/test/scala/org/apache/spark/sql/common/util/CsvCompare.scala
b/integration-testcases/src/test/scala/org/apache/spark/sql/common/util/CsvCompare.scala
index 6035c84..7d8759d 100644
--- a/integration-testcases/src/test/scala/org/apache/spark/sql/common/util/CsvCompare.scala
+++ b/integration-testcases/src/test/scala/org/apache/spark/sql/common/util/CsvCompare.scala
@@ -24,7 +24,6 @@ class CsvCompare {
     val carbon = new HashMap[String, Integer]()
     val hivefile = new BufferedReader(new FileReader(hive))
     val olapfile = new BufferedReader(new FileReader(olap))
-    var line: String = null
     
  
       for(line <- Source.fromFile(file1).getLines())

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/90893247/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
index 9bc2bb9..7033b85 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
@@ -496,7 +496,7 @@ class CarbonSqlParser()
                                   tableProperties: Map[String, String]): tableModel
   = {
 
-    var (dims: Seq[Field], noDictionaryDims: Seq[String]) = extractDimColsAndNoDictionaryFields(
+    val (dims: Seq[Field], noDictionaryDims: Seq[String]) = extractDimColsAndNoDictionaryFields(
       fields, tableProperties)
     val msrs: Seq[Field] = extractMsrColsFromFields(fields, tableProperties)
 

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/90893247/integration/spark/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala
b/integration/spark/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala
index 35e1035..058cea5 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/SparkUnknownExpression.scala
@@ -90,7 +90,7 @@ class SparkUnknownExpression(sparkExp: SparkExpression)
   }
 
   def isSingleDimension(): Boolean = {
-    var lst = new java.util.ArrayList[ColumnExpression]()
+    val lst = new java.util.ArrayList[ColumnExpression]()
     getAllColumnListFromExpressionTree(sparkExp, lst)
     if (lst.size == 1 && lst.get(0).isDimension) {
       true
@@ -127,7 +127,7 @@ class SparkUnknownExpression(sparkExp: SparkExpression)
   }
 
   def isDirectDictionaryColumns(): Boolean = {
-    var lst = new ArrayList[ColumnExpression]()
+    val lst = new ArrayList[ColumnExpression]()
     getAllColumnListFromExpressionTree(sparkExp, lst)
     if (lst.get(0).getCarbonColumn.hasEncoding(Encoding.DIRECT_DICTIONARY)) {
       true

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/90893247/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
index 3a0e21c..0278a68 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/execution/command/carbonTableSchema.scala
@@ -204,7 +204,7 @@ class TableNewProcessor(cm: tableModel, sqlContext: SQLContext) {
     if (dataType == DataType.TIMESTAMP) {
       encoders.add(Encoding.DIRECT_DICTIONARY)
     }
-    var colPropMap = new java.util.HashMap[String, String]()
+    val colPropMap = new java.util.HashMap[String, String]()
     if (None != cm.colProps && null != cm.colProps.get.get(colName)) {
       val colProps = cm.colProps.get.get(colName)
       colProps.asScala.foreach { x => colPropMap.put(x.key, x.value) }
@@ -1197,7 +1197,7 @@ private[sql] case class AlterTableCompaction(alterTableModel: AlterTableModel)
e
 
   def run(sqlContext: SQLContext): Seq[Row] = {
     // TODO : Implement it.
-    var tableName = alterTableModel.tableName
+    val tableName = alterTableModel.tableName
     val schemaName = getDB.getDatabaseName(alterTableModel.dbName, sqlContext)
     if (null == org.carbondata.core.carbon.metadata.CarbonMetadata.getInstance
       .getCarbonTable(schemaName + "_" + tableName)) {
@@ -1354,7 +1354,7 @@ private[sql] case class DeleteLoadsById(
     }
     val path = carbonTable.getMetaDataFilepath
 
-    var segmentStatusManager =
+    val segmentStatusManager =
       new SegmentStatusManager(carbonTable.getAbsoluteTableIdentifier)
 
     val invalidLoadIds = segmentStatusManager.updateDeletionStatus(loadids.asJava, path).asScala
@@ -1413,9 +1413,9 @@ private[sql] case class DeleteLoadsByLoadDate(
       throw new MalformedCarbonCommandException(errorMessage)
     }
 
-    var carbonTable = org.carbondata.core.carbon.metadata.CarbonMetadata.getInstance()
+    val carbonTable = org.carbondata.core.carbon.metadata.CarbonMetadata.getInstance()
       .getCarbonTable(schemaName + '_' + tableName)
-    var segmentStatusManager = new SegmentStatusManager(carbonTable.getAbsoluteTableIdentifier)
+    val segmentStatusManager = new SegmentStatusManager(carbonTable.getAbsoluteTableIdentifier)
 
     if (null == carbonTable) {
       var relation = CarbonEnv.getInstance(sqlContext).carbonCatalog.lookupRelation1(
@@ -1424,9 +1424,9 @@ private[sql] case class DeleteLoadsByLoadDate(
         None
       )(sqlContext).asInstanceOf[CarbonRelation]
     }
-    var path = carbonTable.getMetaDataFilepath()
+    val path = carbonTable.getMetaDataFilepath()
 
-    var invalidLoadTimestamps = segmentStatusManager
+    val invalidLoadTimestamps = segmentStatusManager
       .updateDeletionStatus(loadDate, path, timeObj.asInstanceOf[java.lang.Long]).asScala
     if(invalidLoadTimestamps.isEmpty) {
       LOGGER.audit(s"Delete load by load date is successfull for $schemaName.$tableName.")
@@ -1505,7 +1505,7 @@ private[sql] case class LoadCube(
       // Need to fill dimension relation
       carbonLoadModel.setCarbonDataLoadSchema(dataLoadSchema)
       var storeLocation = ""
-      var configuredStore = CarbonLoaderUtil.getConfiguredLocalDirs(SparkEnv.get.conf)
+      val configuredStore = CarbonLoaderUtil.getConfiguredLocalDirs(SparkEnv.get.conf)
       if (null != configuredStore && configuredStore.length > 0) {
         storeLocation = configuredStore(Random.nextInt(configuredStore.length))
       }
@@ -1945,7 +1945,7 @@ private[sql] case class ShowLoads(
     }
     val path = carbonTable.getMetaDataFilepath()
 
-    var segmentStatusManager = new SegmentStatusManager(carbonTable.getAbsoluteTableIdentifier)
+    val segmentStatusManager = new SegmentStatusManager(carbonTable.getAbsoluteTableIdentifier)
 
     val loadMetadataDetailsArray = segmentStatusManager.readLoadMetadata(path)
 
@@ -2110,7 +2110,7 @@ private[sql] case class DeleteLoadByDate(
     val relation = CarbonEnv.getInstance(sqlContext).carbonCatalog
       .lookupRelation1(Some(schemaName), cubeName, None)(sqlContext).asInstanceOf[CarbonRelation]
     var level: String = ""
-    var carbonTable = org.carbondata.core.carbon.metadata.CarbonMetadata
+    val carbonTable = org.carbondata.core.carbon.metadata.CarbonMetadata
          .getInstance().getCarbonTable(schemaName + '_' + cubeName)
     if (relation == null) {
       LOGGER.audit(s"The delete load by date is failed. Table $schemaName.$cubeName does
not exist")

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/90893247/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataLoadRDD.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataLoadRDD.scala
b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataLoadRDD.scala
index 18ea3b0..8869368 100644
--- a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataLoadRDD.scala
+++ b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataLoadRDD.scala
@@ -174,7 +174,7 @@ class CarbonDataLoadRDD[K, V](
         CarbonProperties.getInstance().addProperty("high.cardinality.value", "100000")
         CarbonProperties.getInstance().addProperty("is.compressed.keyblock", "false")
         CarbonProperties.getInstance().addProperty("carbon.leaf.node.size", "120000")
-        var storeLocations = CarbonLoaderUtil.getConfiguredLocalDirs(SparkEnv.get.conf)
+        val storeLocations = CarbonLoaderUtil.getConfiguredLocalDirs(SparkEnv.get.conf)
         if (null != storeLocations && storeLocations.length > 0) {
           storeLocation = storeLocations(Random.nextInt(storeLocations.length))
         }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/90893247/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala
b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala
index b314f88..7726739 100644
--- a/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala
+++ b/integration/spark/src/main/scala/org/carbondata/spark/rdd/CarbonDataRDDFactory.scala
@@ -533,7 +533,7 @@ object CarbonDataRDDFactory extends Logging {
           )
 
         var storeLocation = ""
-        var configuredStore = CarbonLoaderUtil.getConfiguredLocalDirs(SparkEnv.get.conf)
+        val configuredStore = CarbonLoaderUtil.getConfiguredLocalDirs(SparkEnv.get.conf)
         if (null != configuredStore && configuredStore.length > 0) {
           storeLocation = configuredStore(Random.nextInt(configuredStore.length))
         }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/90893247/integration/spark/src/test/scala/org/apache/spark/sql/TestCarbonSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/apache/spark/sql/TestCarbonSqlParser.scala
b/integration/spark/src/test/scala/org/apache/spark/sql/TestCarbonSqlParser.scala
index 983d488..23c4a9d 100644
--- a/integration/spark/src/test/scala/org/apache/spark/sql/TestCarbonSqlParser.scala
+++ b/integration/spark/src/test/scala/org/apache/spark/sql/TestCarbonSqlParser.scala
@@ -71,7 +71,7 @@ class TestCarbonSqlParser extends QueryTest {
   test("Test-updateColumnGroupsInField") {
     val colGroupStr = "(col2,col3),(col5,col6),(col7,col8)"
     val tableProperties = Map(CarbonCommonConstants.COLUMN_GROUPS -> colGroupStr)
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
     val colgrps = stub.updateColumnGroupsInFieldTest(fields, tableProperties)
     assert(colgrps.lift(0).get.equalsIgnoreCase("col2,col3"))
@@ -93,7 +93,7 @@ class TestCarbonSqlParser extends QueryTest {
   test("Test-ColumnGroupsInvalidField_Shouldnotallow") {
     val colGroupStr = "(col1,col2),(col10,col6),(col7,col8)"
     val tableProperties = Map(CarbonCommonConstants.COLUMN_GROUPS -> colGroupStr)
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
     try {
       val colgrps = stub.updateColumnGroupsInFieldTest(fields, tableProperties)
@@ -106,7 +106,7 @@ class TestCarbonSqlParser extends QueryTest {
     //col1 is measure
     val colGroupStr = "(col1,col2),(col5,col6),(col7,col8)"
     val tableProperties = Map(CarbonCommonConstants.COLUMN_GROUPS -> colGroupStr)
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
     try {
       val colgrps = stub.updateColumnGroupsInFieldTest(fields, tableProperties)
@@ -120,7 +120,7 @@ class TestCarbonSqlParser extends QueryTest {
     val colGroupStr = "(col2,col3),(col5,col6),(col7,col8)"
     val noDictStr = "col5"
     val tableProperties = Map(CarbonCommonConstants.COLUMN_GROUPS -> colGroupStr, CarbonCommonConstants.DICTIONARY_EXCLUDE
-> noDictStr)
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
     try {
       val colgrps = stub.updateColumnGroupsInFieldTest(fields, tableProperties)
@@ -132,7 +132,7 @@ class TestCarbonSqlParser extends QueryTest {
   test("Test-SameColumnInDifferentGroup_ShouldNotAllow") {
     val colGroupStr = "(col2,col3),(col5,col6),(col6,col7,col8)"
     val tableProperties = Map(CarbonCommonConstants.COLUMN_GROUPS -> colGroupStr)
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
     try {
       val colgrps = stub.updateColumnGroupsInFieldTest(fields, tableProperties)
@@ -145,7 +145,7 @@ class TestCarbonSqlParser extends QueryTest {
    test("Test-ColumnAreNotTogetherAsInSchema_ShouldNotAllow") {
     val colGroupStr = "(col2,col3),(col5,col8)"
     val tableProperties = Map(CarbonCommonConstants.COLUMN_GROUPS -> colGroupStr)
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
     try {
       val colgrps = stub.updateColumnGroupsInFieldTest(fields, tableProperties)
@@ -157,7 +157,7 @@ class TestCarbonSqlParser extends QueryTest {
   test("Test-ColumnInColumnGroupAreShuffledButInSequence") {
     val colGroupStr = "(col2,col3),(col7,col8,col6)"
     val tableProperties = Map(CarbonCommonConstants.COLUMN_GROUPS -> colGroupStr)
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
     
     val colgrps = stub.updateColumnGroupsInFieldTest(fields, tableProperties)
@@ -167,7 +167,7 @@ class TestCarbonSqlParser extends QueryTest {
   // Testing the column group Splitting method with empty table properties so null will be
returned.
   test("Test-Empty-updateColumnGroupsInField") {
     val tableProperties = Map("" -> "")
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
     val colgrps = stub.updateColumnGroupsInFieldTest(fields, Map())
     //assert( rtn === 1)
@@ -177,10 +177,10 @@ class TestCarbonSqlParser extends QueryTest {
   // Testing the extracting of Dims and no Dictionary
   test("Test-extractDimColsAndNoDictionaryFields") {
     val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE -> "col2", CarbonCommonConstants.DICTIONARY_INCLUDE
-> "col4")
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
 
     val stub = new TestCarbonSqlParserStub()
-    var (dimCols, noDictionary) = stub.extractDimColsAndNoDictionaryFieldsTest(fields, tableProperties)
+    val (dimCols, noDictionary) = stub.extractDimColsAndNoDictionaryFieldsTest(fields, tableProperties)
 
     // testing col
 
@@ -197,11 +197,11 @@ class TestCarbonSqlParser extends QueryTest {
 
   test("Test-DimAndMsrColsWithNoDictionaryFields1") {
     val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE -> "col1")
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
-    var (dimCols, noDictionary) = stub
+    val (dimCols, noDictionary) = stub
       .extractDimColsAndNoDictionaryFieldsTest(fields, tableProperties)
-    var msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
+    val msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
 
     //below fields should be available in dimensions list
     assert(dimCols.size == 7)
@@ -220,11 +220,11 @@ class TestCarbonSqlParser extends QueryTest {
 
   test("Test-DimAndMsrColsWithNoDictionaryFields2") {
     val tableProperties = Map(CarbonCommonConstants.DICTIONARY_INCLUDE -> "col1")
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
-    var (dimCols, noDictionary) = stub
+    val (dimCols, noDictionary) = stub
       .extractDimColsAndNoDictionaryFieldsTest(fields, tableProperties)
-    var msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
+    val msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
 
     //below dimension fields should be available in dimensions list
     assert(dimCols.size == 7)
@@ -242,11 +242,11 @@ class TestCarbonSqlParser extends QueryTest {
 
   test("Test-DimAndMsrColsWithNoDictionaryFields3") {
     val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE -> "col1", CarbonCommonConstants.DICTIONARY_INCLUDE
-> "col4")
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
-    var (dimCols, noDictionary) = stub
+    val (dimCols, noDictionary) = stub
       .extractDimColsAndNoDictionaryFieldsTest(fields, tableProperties)
-    var msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
+    val msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
 
     //below dimension fields should be available in dimensions list
     assert(dimCols.size == 8)
@@ -265,11 +265,11 @@ class TestCarbonSqlParser extends QueryTest {
 
   test("Test-DimAndMsrColsWithNoDictionaryFields4") {
     val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE -> "col3", CarbonCommonConstants.DICTIONARY_INCLUDE
-> "col2")
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
-    var (dimCols, noDictionary) = stub
+    val (dimCols, noDictionary) = stub
       .extractDimColsAndNoDictionaryFieldsTest(fields, tableProperties)
-    var msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
+    val msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
 
     //below dimension fields should be available in dimensions list
     assert(dimCols.size == 6)
@@ -288,11 +288,11 @@ class TestCarbonSqlParser extends QueryTest {
 
   test("Test-DimAndMsrColsWithNoDictionaryFields5") {
     val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE -> "col4", CarbonCommonConstants.DICTIONARY_INCLUDE
-> "col2")
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
-    var (dimCols, noDictionary) = stub
+    val (dimCols, noDictionary) = stub
       .extractDimColsAndNoDictionaryFieldsTest(fields, tableProperties)
-    var msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
+    val msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
 
     //below dimension fields should be available in dimensions list
     assert(dimCols.size == 7)
@@ -311,11 +311,11 @@ class TestCarbonSqlParser extends QueryTest {
 
   test("Test-DimAndMsrColsWithNoDictionaryFields6") {
     val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE -> "col2", CarbonCommonConstants.DICTIONARY_INCLUDE
-> "col1")
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
-    var (dimCols, noDictionary) = stub
+    val (dimCols, noDictionary) = stub
       .extractDimColsAndNoDictionaryFieldsTest(fields, tableProperties)
-    var msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
+    val msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
 
     //below dimension fields should be available in dimensions list
     assert(dimCols.size == 7)
@@ -336,11 +336,11 @@ class TestCarbonSqlParser extends QueryTest {
     val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE -> "col2 ,col1
 ",
       CarbonCommonConstants.DICTIONARY_INCLUDE -> "col3 ,col4 "
     )
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
-    var (dimCols, noDictionary) = stub
+    val (dimCols, noDictionary) = stub
       .extractDimColsAndNoDictionaryFieldsTest(fields, tableProperties)
-    var msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
+    val msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
 
     //below dimension fields should be available in dimensions list
     assert(dimCols.size == 8)
@@ -360,11 +360,11 @@ class TestCarbonSqlParser extends QueryTest {
 
   test("Test-DimAndMsrColsWithNoDictionaryFields8") {
     val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE-> "col2,col4",
CarbonCommonConstants.DICTIONARY_INCLUDE -> "col3")
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
-    var (dimCols, noDictionary) = stub
+    val (dimCols, noDictionary) = stub
       .extractDimColsAndNoDictionaryFieldsTest(fields, tableProperties)
-    var msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
+    val msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
 
     //below dimension fields should be available in dimensions list
     assert(dimCols.size == 7)
@@ -385,9 +385,9 @@ class TestCarbonSqlParser extends QueryTest {
   // Testing the extracting of measures
   test("Test-extractMsrColsFromFields") {
     val tableProperties = Map(CarbonCommonConstants.DICTIONARY_EXCLUDE -> "col2", CarbonCommonConstants.DICTIONARY_INCLUDE
-> "col4")
-    var fields: Seq[Field] = loadAllFields
+    val fields: Seq[Field] = loadAllFields
     val stub = new TestCarbonSqlParserStub()
-    var msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
+    val msrCols = stub.extractMsrColsFromFieldsTest(fields, tableProperties)
 
     // testing col
     assert(msrCols.lift(0).get.column.equalsIgnoreCase("col1"))

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/90893247/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionCardinalityBoundryTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionCardinalityBoundryTest.scala
b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionCardinalityBoundryTest.scala
index f5fc8e9..7eeaffb 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionCardinalityBoundryTest.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionCardinalityBoundryTest.scala
@@ -33,12 +33,12 @@ class DataCompactionCardinalityBoundryTest extends QueryTest with BeforeAndAfter
 
     val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
       .getCanonicalPath
-    var csvFilePath1 = currentDirectory + "/src/test/resources/compaction/compaction1.csv"
+    val csvFilePath1 = currentDirectory + "/src/test/resources/compaction/compaction1.csv"
 
     // loading the rows greater than 256. so that the column cardinality crosses byte boundary.
-    var csvFilePath2 = currentDirectory + "/src/test/resources/compaction/compactioncard2.csv"
+    val csvFilePath2 = currentDirectory + "/src/test/resources/compaction/compactioncard2.csv"
 
-    var csvFilePath3 = currentDirectory + "/src/test/resources/compaction/compaction3.csv"
+    val csvFilePath3 = currentDirectory + "/src/test/resources/compaction/compaction3.csv"
 
 
     sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE cardinalityTest OPTIONS"
+

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/90893247/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionTest.scala
b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionTest.scala
index 9f87ada..1320b39 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionTest.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/datacompaction/DataCompactionTest.scala
@@ -34,10 +34,10 @@ class DataCompactionTest extends QueryTest with BeforeAndAfterAll {
 
     val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
       .getCanonicalPath
-    var csvFilePath1 = currentDirectory + "/src/test/resources/compaction/compaction1.csv"
+    val csvFilePath1 = currentDirectory + "/src/test/resources/compaction/compaction1.csv"
 
-    var csvFilePath2 = currentDirectory + "/src/test/resources/compaction/compaction2.csv"
-    var csvFilePath3 = currentDirectory + "/src/test/resources/compaction/compaction3.csv"
+    val csvFilePath2 = currentDirectory + "/src/test/resources/compaction/compaction2.csv"
+    val csvFilePath3 = currentDirectory + "/src/test/resources/compaction/compaction3.csv"
 
     sql("LOAD DATA LOCAL INPATH '" + csvFilePath1 + "' INTO TABLE normalcompaction OPTIONS"
+
       "('DELIMITER'= ',', 'QUOTECHAR'= '\"')"

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/90893247/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
index 99bae17..173fe6b 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntax.scala
@@ -216,7 +216,7 @@ class TestLoadDataWithHiveSyntax extends QueryTest with BeforeAndAfterAll
{
     )
     val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
       .getCanonicalPath
-    var csvFilePath = currentDirectory + "/src/test/resources/data_withCAPSHeader.csv"
+    val csvFilePath = currentDirectory + "/src/test/resources/data_withCAPSHeader.csv"
     sql("LOAD DATA local inpath '" + csvFilePath + "' INTO table header_test OPTIONS " +
       "('DELIMITER'=',', 'QUOTECHAR'='\"')");
     checkAnswer(sql("select empno from header_test"),
@@ -248,7 +248,7 @@ class TestLoadDataWithHiveSyntax extends QueryTest with BeforeAndAfterAll
{
     )
     val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
       .getCanonicalPath
-    var csvFilePath = currentDirectory + "/src/test/resources/data_withMixedHeader.csv"
+    val csvFilePath = currentDirectory + "/src/test/resources/data_withMixedHeader.csv"
     sql("LOAD DATA local inpath '" + csvFilePath + "' INTO table mixed_header_test OPTIONS
" +
       "('DELIMITER'=',', 'QUOTECHAR'='\"')");
     checkAnswer(sql("select empno from mixed_header_test"),

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/90893247/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
index 606e04b..baff810 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryTestCase.scala
@@ -61,10 +61,9 @@ class TimestampDataTypeDirectDictionaryTest extends QueryTest with BeforeAndAfte
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd HH:mm:ss")
       val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
         .getCanonicalPath
-      var csvFilePath = currentDirectory + "/src/test/resources/datasample.csv"
+      val csvFilePath = currentDirectory + "/src/test/resources/datasample.csv"
       sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE directDictionaryCube OPTIONS"
+
         "('DELIMITER'= ',', 'QUOTECHAR'= '\"')");
-
     } catch {
       case x: Throwable => CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/90893247/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryWithNoDictTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryWithNoDictTestCase.scala
b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryWithNoDictTestCase.scala
index dafe8ee..a62c632 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryWithNoDictTestCase.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeDirectDictionaryWithNoDictTestCase.scala
@@ -61,10 +61,9 @@ class TimestampDataTypeDirectDictionaryWithNoDictTestCase extends QueryTest with
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy-MM-dd HH:mm:ss")
       val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
         .getCanonicalPath
-      var csvFilePath = currentDirectory + "/src/test/resources/datasample.csv"
+      val csvFilePath = currentDirectory + "/src/test/resources/datasample.csv"
       sql("LOAD DATA local inpath '" + csvFilePath + "' INTO TABLE directDictionaryTable OPTIONS"
         + "('DELIMITER'= ',', 'QUOTECHAR'= '\"')");
-
     } catch {
       case x: Throwable => CarbonProperties.getInstance()
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/90893247/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeNullDataTest.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeNullDataTest.scala
b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeNullDataTest.scala
index c8c1f81..5260fe7 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeNullDataTest.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/testsuite/directdictionary/TimestampDataTypeNullDataTest.scala
@@ -59,7 +59,7 @@ class TimestampDataTypeNullDataTest extends QueryTest with BeforeAndAfterAll {
         .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/mm/dd")
       val currentDirectory = new File(this.getClass.getResource("/").getPath + "/../../")
         .getCanonicalPath
-      var csvFilePath = currentDirectory + "/src/test/resources/datasamplenull.csv"
+      val csvFilePath = currentDirectory + "/src/test/resources/datasamplenull.csv"
       sql("LOAD DATA LOCAL INPATH '" + csvFilePath + "' INTO TABLE timestampTyeNullData").collect();
 
     } catch {

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/90893247/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilTestCase.scala
b/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilTestCase.scala
index f1a076b..4d68e98 100644
--- a/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilTestCase.scala
+++ b/integration/spark/src/test/scala/org/carbondata/spark/util/GlobalDictionaryUtilTestCase.scala
@@ -153,7 +153,7 @@ class GlobalDictionaryUtilTestCase extends QueryTest with BeforeAndAfterAll {
 
   test("[issue-80]Global Dictionary Generation") {
 
-    var carbonLoadModel = buildCarbonLoadModel(sampleRelation, filePath, null, null)
+    val carbonLoadModel = buildCarbonLoadModel(sampleRelation, filePath, null, null)
     GlobalDictionaryUtil
       .generateGlobalDictionary(CarbonHiveContext,
         carbonLoadModel,
@@ -171,7 +171,7 @@ class GlobalDictionaryUtilTestCase extends QueryTest with BeforeAndAfterAll {
   test("[Issue-190]load csv file without header And support complex type") {
     val header = "deviceInformationId,channelsId,ROMSize,purchasedate,mobile,MAC,locationinfo," +
       "proddate,gamePointId,contractNumber"
-    var carbonLoadModel = buildCarbonLoadModel(complexRelation, complexfilePath, null, header)
+    val carbonLoadModel = buildCarbonLoadModel(complexRelation, complexfilePath, null, header)
     GlobalDictionaryUtil
       .generateGlobalDictionary(CarbonHiveContext,
         carbonLoadModel,


Mime
View raw message