carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ravipes...@apache.org
Subject [1/2] incubator-carbondata git commit: fix carbonexample for spark2
Date Mon, 05 Dec 2016 13:30:13 GMT
Repository: incubator-carbondata
Updated Branches:
  refs/heads/master 0a2841a2f -> 7277355a9


fix carbonexample for spark2

fix style


Project: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/commit/5b29cd9b
Tree: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/tree/5b29cd9b
Diff: http://git-wip-us.apache.org/repos/asf/incubator-carbondata/diff/5b29cd9b

Branch: refs/heads/master
Commit: 5b29cd9ba48103e9a77c087d38495eda1c1ba47b
Parents: 0a2841a
Author: jackylk <jacky.likun@huawei.com>
Authored: Mon Dec 5 20:51:50 2016 +0800
Committer: ravipesala <ravi.pesala@gmail.com>
Committed: Mon Dec 5 18:59:13 2016 +0530

----------------------------------------------------------------------
 .../src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala     | 2 +-
 .../carbondata/spark/readsupport/SparkRowReadSupportImpl.java     | 2 +-
 .../spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala    | 3 ++-
 3 files changed, 4 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b29cd9b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
----------------------------------------------------------------------
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
index b7f8b90..3f371aa 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSqlParser.scala
@@ -1160,7 +1160,7 @@ class CarbonSqlParser() extends AbstractSparkSQLParser {
       case "string" => Field(field.column, Some("String"), field.name, Some(null), field.parent,
         field.storeType, field.schemaOrdinal
       )
-      case "smallint"  => Field(field.column, Some("SmallInt"), field.name, Some(null),
+      case "smallint" => Field(field.column, Some("SmallInt"), field.name, Some(null),
         field.parent, field.storeType, field.schemaOrdinal
       )
       case "integer" | "int" => Field(field.column, Some("Integer"), field.name, Some(null),

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b29cd9b/integration/spark2/src/main/java/org/apache/carbondata/spark/readsupport/SparkRowReadSupportImpl.java
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/java/org/apache/carbondata/spark/readsupport/SparkRowReadSupportImpl.java b/integration/spark2/src/main/java/org/apache/carbondata/spark/readsupport/SparkRowReadSupportImpl.java
index c2f5d5d..65d583c 100644
--- a/integration/spark2/src/main/java/org/apache/carbondata/spark/readsupport/SparkRowReadSupportImpl.java
+++ b/integration/spark2/src/main/java/org/apache/carbondata/spark/readsupport/SparkRowReadSupportImpl.java
@@ -48,7 +48,7 @@ public class SparkRowReadSupportImpl extends AbstractDictionaryDecodedReadSuppor
         } else if(dataTypes[i].equals(DataType.INT)) {
           data[i] = ((Long)(data[i])).intValue();
         } else if(dataTypes[i].equals(DataType.SHORT)) {
-          data[i] = ((Double)(data[i])).shortValue();
+          data[i] = ((Long)(data[i])).shortValue();
         }
       }
     }

http://git-wip-us.apache.org/repos/asf/incubator-carbondata/blob/5b29cd9b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
index 2dee263..80f2a07 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/CarbonEnv.scala
@@ -24,6 +24,7 @@ import org.apache.carbondata.common.logging.LogServiceFactory
 import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.hadoop.readsupport.impl.RawDataReadSupport
 import org.apache.carbondata.spark.rdd.SparkCommonEnv
+import org.apache.carbondata.spark.readsupport.SparkRowReadSupportImpl
 
 /**
  * Carbon Environment for unified context
@@ -57,7 +58,7 @@ object CarbonEnv {
   }
 
   private def setSparkCommonEnv(sqlContext: SQLContext): Unit = {
-    SparkCommonEnv.readSupportClass = classOf[RawDataReadSupport]
+    SparkCommonEnv.readSupportClass = classOf[SparkRowReadSupportImpl]
     SparkCommonEnv.numExistingExecutors = sqlContext.sparkContext.schedulerBackend match {
       case b: CoarseGrainedSchedulerBackend => b.getExecutorIds().length
       case _ => 0


Mime
View raw message