carbondata-commits mailing list archives

From jack...@apache.org
Subject [1/4] carbondata git commit: [CARBONDATA-1994] Remove CarbonInputFormat [Forced Update!]
Date Sun, 14 Jan 2018 13:43:02 GMT
Repository: carbondata
Updated Branches:
  refs/heads/carbonstore 580be7d8a -> 177d6d26c (forced update)


http://git-wip-us.apache.org/repos/asf/carbondata/blob/a324e5db/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataRecordSetProvider.java
----------------------------------------------------------------------
diff --git a/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataRecordSetProvider.java b/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataRecordSetProvider.java
index 65c7373..f039daf 100755
--- a/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataRecordSetProvider.java
+++ b/integration/presto/src/main/java/org/apache/carbondata/presto/CarbondataRecordSetProvider.java
@@ -119,17 +119,13 @@ public class CarbondataRecordSetProvider implements ConnectorRecordSetProvider {
 
     AbsoluteTableIdentifier identifier = carbonTable.getAbsoluteTableIdentifier();
     CarbonTableInputFormat format = new CarbonTableInputFormat<Object>();
-    try {
-      CarbonTableInputFormat
-          .setTablePath(conf, identifier.appendWithLocalPrefix(identifier.getTablePath()));
-      CarbonTableInputFormat
-          .setDatabaseName(conf, identifier.getCarbonTableIdentifier().getDatabaseName());
-      CarbonTableInputFormat
-          .setTableName(conf, identifier.getCarbonTableIdentifier().getTableName());
-    } catch (IOException e) {
-      throw new RuntimeException("Unable to create the CarbonTableInputFormat", e);
-    }
-    CarbonTableInputFormat.setFilterPredicates(conf, filterExpression);
+    CarbonTableInputFormat
+        .setTablePath(conf, identifier.appendWithLocalPrefix(identifier.getTablePath()));
+    CarbonTableInputFormat
+        .setDatabaseName(conf, identifier.getCarbonTableIdentifier().getDatabaseName());
+    CarbonTableInputFormat
+        .setTableName(conf, identifier.getCarbonTableIdentifier().getTableName());
+    CarbonTableInputFormat.setFilterPredicates(conf, filterExpression);
     CarbonTableInputFormat.setColumnProjection(conf, projection);
 
     return format;
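For readers skimming the archive: the hunk above drops the try/catch around the static setters, presumably because these CarbonTableInputFormat setters no longer throw a checked IOException. Below is a minimal Scala sketch (not part of the patch) of the resulting call sequence, using only the setters visible in this hunk; the helper name configureFormat and the import paths not shown in this patch (AbsoluteTableIdentifier, Expression, CarbonProjection) are assumptions.

import org.apache.hadoop.conf.Configuration
import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier
import org.apache.carbondata.core.scan.expression.Expression
import org.apache.carbondata.hadoop.CarbonProjection
import org.apache.carbondata.hadoop.api.CarbonTableInputFormat

// Hypothetical helper mirroring the patched provider code: configure the
// Hadoop Configuration for a CarbonData table without the old try/catch.
def configureFormat(conf: Configuration,
    identifier: AbsoluteTableIdentifier,
    filterExpression: Expression,
    projection: CarbonProjection): CarbonTableInputFormat[Object] = {
  val format = new CarbonTableInputFormat[Object]()
  CarbonTableInputFormat.setTablePath(conf,
    identifier.appendWithLocalPrefix(identifier.getTablePath))
  CarbonTableInputFormat.setDatabaseName(conf,
    identifier.getCarbonTableIdentifier.getDatabaseName)
  CarbonTableInputFormat.setTableName(conf,
    identifier.getCarbonTableIdentifier.getTableName)
  CarbonTableInputFormat.setFilterPredicates(conf, filterExpression)
  CarbonTableInputFormat.setColumnProjection(conf, projection)
  format
}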

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a324e5db/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dblocation/DBLocationCarbonTableTestCase.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dblocation/DBLocationCarbonTableTestCase.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dblocation/DBLocationCarbonTableTestCase.scala
index 6f3191d..8e8935f 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dblocation/DBLocationCarbonTableTestCase.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/spark/testsuite/dblocation/DBLocationCarbonTableTestCase.scala
@@ -44,6 +44,24 @@ class DBLocationCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
     sql("drop database if exists carbon cascade")
   }
 
+  //TODO fix this test case
+  test("Update operation on carbon table with insert into") {
+    sql("drop database if exists carbon2 cascade")
+    sql(s"create database carbon2 location '$dblocation'")
+    sql("use carbon2")
+    sql("""create table carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
+    sql("insert into carbontable select 'a',1,'aa','aaa'")
+    sql("insert into carbontable select 'b',1,'bb','bbb'")
+    // update operation
+    sql("""update carbontable d  set (d.c2) = (d.c2 + 1) where d.c1 = 'a'""").show()
+    sql("""update carbontable d  set (d.c2) = (d.c2 + 1) where d.c1 = 'b'""").show()
+    checkAnswer(
+      sql("""select c1,c2,c3,c5 from carbontable"""),
+      Seq(Row("a",2,"aa","aaa"),Row("b",2,"bb","bbb"))
+    )
+    sql("drop database if exists carbon2 cascade")
+  }
+
   test("create and drop database test") {
     sql(s"create database carbon location '$dblocation'")
     sql("drop database if exists carbon cascade")
@@ -89,23 +107,6 @@ class DBLocationCarbonTableTestCase extends QueryTest with BeforeAndAfterAll {
     checkAnswer(sql("select c1 from carbontable"), Seq(Row("a"), Row("b")))
   }
 
-  //TODO fix this test case
-  test("Update operation on carbon table with insert into") {
-    sql("drop database if exists carbon cascade")
-    sql(s"create database carbon location '$dblocation'")
-    sql("use carbon")
-    sql("""create table carbon.carbontable (c1 string,c2 int,c3 string,c5 string) STORED BY 'org.apache.carbondata.format'""")
-    sql("insert into carbontable select 'a',1,'aa','aaa'")
-    sql("insert into carbontable select 'b',1,'bb','bbb'")
-    // update operation
-    sql("""update carbon.carbontable d  set (d.c2) = (d.c2 + 1) where d.c1 = 'a'""").show()
-    sql("""update carbon.carbontable d  set (d.c2) = (d.c2 + 1) where d.c1 = 'b'""").show()
-    checkAnswer(
-      sql("""select c1,c2,c3,c5 from carbon.carbontable"""),
-      Seq(Row("a",2,"aa","aaa"),Row("b",2,"bb","bbb"))
-    )
-  }
-
 
   test("Update operation on carbon table") {
     sql("drop database if exists carbon cascade")
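The hunks above move the "Update operation on carbon table with insert into" test into its own carbon2 database instead of reusing the shared carbon database. As a rough illustration only, the same SQL can be driven from a standalone session; the sketch below assumes the getOrCreateCarbonSession builder from the spark2 integration, and the store and database paths are placeholders.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.CarbonSession._

// Hedged sketch mirroring the relocated test case; /tmp paths are placeholders.
val spark = SparkSession.builder()
  .master("local[2]")
  .appName("carbontable-update-sketch")
  .getOrCreateCarbonSession("/tmp/carbonstore")

spark.sql("drop database if exists carbon2 cascade")
spark.sql("create database carbon2 location '/tmp/dblocation'")
spark.sql("use carbon2")
spark.sql("create table carbontable (c1 string,c2 int,c3 string,c5 string) " +
  "STORED BY 'org.apache.carbondata.format'")
spark.sql("insert into carbontable select 'a',1,'aa','aaa'")
spark.sql("insert into carbontable select 'b',1,'bb','bbb'")
// update operation, as in the test
spark.sql("update carbontable d set (d.c2) = (d.c2 + 1) where d.c1 = 'a'").show()
spark.sql("select c1,c2,c3,c5 from carbontable").show()
spark.sql("drop database if exists carbon2 cascade")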

http://git-wip-us.apache.org/repos/asf/carbondata/blob/a324e5db/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
index 994880c..a8efb84 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/mutation/DeleteExecution.scala
@@ -42,7 +42,6 @@ import org.apache.carbondata.core.mutate.data.RowCountDetailsVO
 import org.apache.carbondata.core.statusmanager.{SegmentStatus, SegmentUpdateStatusManager}
 import org.apache.carbondata.core.util.path.{CarbonStorePath, CarbonTablePath}
 import org.apache.carbondata.core.writer.CarbonDeleteDeltaWriterImpl
-import org.apache.carbondata.hadoop.CarbonInputFormat
 import org.apache.carbondata.hadoop.api.CarbonTableInputFormat
 import org.apache.carbondata.processing.exception.MultipleMatchingException
 import org.apache.carbondata.processing.loading.FailureCauses
@@ -86,7 +85,7 @@ object DeleteExecution {
     }
 
     val (carbonInputFormat, job) = createCarbonInputFormat(absoluteTableIdentifier)
-    CarbonInputFormat.setTableInfo(job.getConfiguration, carbonTable.getTableInfo)
+    CarbonTableInputFormat.setTableInfo(job.getConfiguration, carbonTable.getTableInfo)
     val keyRdd = deleteRdd.map({ row =>
       val tupleId: String = row
         .getString(row.fieldIndex(CarbonCommonConstants.CARBON_IMPLICIT_COLUMN_TUPLEID))
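Finally, DeleteExecution now attaches the table schema through CarbonTableInputFormat instead of the removed CarbonInputFormat. A minimal sketch of that call pattern follows; the helper name and the CarbonTable import path are assumptions, while the setTableInfo call itself is taken from the hunk above.

import org.apache.hadoop.mapreduce.Job
import org.apache.carbondata.core.metadata.schema.table.CarbonTable
import org.apache.carbondata.hadoop.api.CarbonTableInputFormat

// Hypothetical helper: put the table schema into the job configuration via the
// consolidated CarbonTableInputFormat, as the patched DeleteExecution does.
def attachTableInfo(carbonTable: CarbonTable): Job = {
  val job = Job.getInstance()
  CarbonTableInputFormat.setTableInfo(job.getConfiguration, carbonTable.getTableInfo)
  job
}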

