carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ajan...@apache.org
Subject [carbondata] branch master updated: [CARBONDATA-3741] Fix ParseException from hive during ALTER SET TBLPROPERTIES if database name starts with Underscore
Date Tue, 17 Mar 2020 04:40:52 GMT
This is an automated email from the ASF dual-hosted git repository.

ajantha pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new 5a715ce  [CARBONDATA-3741] Fix ParseException from hive during ALTER SET TBLPROPERTIES
if database name starts with Underscore
5a715ce is described below

commit 5a715cef7a1e191b08a2116d9b88aabea256dd77
Author: Indhumathi27 <indhumathim27@gmail.com>
AuthorDate: Fri Mar 13 15:17:59 2020 +0530

    [CARBONDATA-3741] Fix ParseException from hive during ALTER SET TBLPROPERTIES if database
name starts with Underscore
    
    Why is this PR needed?
    After creating a table, on the next query we fire an ALTER query to update the table
schema to record whether the table has an index table. If the database name starts with an
underscore, the Hive ALTER query fails with a parsing exception.
    
    What changes were proposed in this PR?
    Quote the database (and table) name with backticks (``) before firing the query, to avoid the parsing exception
    
    Does this PR introduce any user interface change?
    No
    
    Is any new testcase added?
    Yes
    
    This closes #3668
---
 .../org/apache/spark/sql/hive/CarbonHiveMetaStore.scala    |  2 +-
 .../apache/spark/sql/hive/CarbonSessionCatalogUtil.scala   |  4 ++--
 .../allqueries/InsertIntoCarbonTableSpark2TestCase.scala   | 14 ++++++++++++++
 3 files changed, 17 insertions(+), 3 deletions(-)

diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
index b0a0d47..6cf7551 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
@@ -161,7 +161,7 @@ class CarbonHiveMetaStore extends CarbonFileMetastore {
       .catalog
       .externalCatalog.asInstanceOf[HiveExternalCatalog]
       .client
-    hiveClient.runSqlHive(s"ALTER TABLE $dbName.$tableName SET SERDEPROPERTIES($schemaParts)")
+    hiveClient.runSqlHive(s"ALTER TABLE `$dbName`.`$tableName` SET SERDEPROPERTIES($schemaParts)")
 
     sparkSession.catalog.refreshTable(TableIdentifier(tableName, Some(dbName)).quotedString)
     removeTableFromMetadata(dbName, tableName)
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSessionCatalogUtil.scala
b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSessionCatalogUtil.scala
index f6a5475..db66acc 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSessionCatalogUtil.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/hive/CarbonSessionCatalogUtil.scala
@@ -72,8 +72,8 @@ object CarbonSessionCatalogUtil {
       cols: Option[Seq[ColumnSchema]],
       sparkSession: SparkSession): Unit = {
     getClient(sparkSession)
-      .runSqlHive(s"ALTER TABLE ${ tableIdentifier.database.get }.${ tableIdentifier.table
} " +
-                  s"SET TBLPROPERTIES(${ schemaParts })")
+      .runSqlHive(s"ALTER TABLE `${tableIdentifier.database.get}`.`${tableIdentifier.table}`
" +
+                  s"SET TBLPROPERTIES($schemaParts)")
   }
 
   def alterTableProperties(
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableSpark2TestCase.scala
b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableSpark2TestCase.scala
index 7dafaf7..f4a60f0 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableSpark2TestCase.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/allqueries/InsertIntoCarbonTableSpark2TestCase.scala
@@ -31,7 +31,21 @@ class InsertIntoCarbonTableSpark2TestCase extends QueryTest with BeforeAndAfterA
     checkAnswer(sql("select * from OneRowTable"), Seq(Row("0.1", "a.b", 1, 1.2)))
   }
 
+  test("test insert into with database name having underscore") {
+    sql("drop table if exists OneRowTable")
+    sql("create table OneRowTable(col1 string, col2 string, col3 int, col4 double) STORED
AS carbondata")
+    sql("insert into OneRowTable select '0.1', 'a.b', 1, 1.2")
+    checkAnswer(sql("select * from OneRowTable"), Seq(Row("0.1", "a.b", 1, 1.2)))
+    sql("drop database if exists _default cascade")
+    sql("create database _default")
+    sql("create table _default._OneRowTable(col1 string, col2 string, col3 int, col4 double)
STORED AS carbondata")
+    sql("insert into _default._OneRowTable select * from OneRowTable")
+    checkAnswer(sql("select * from _default._OneRowTable"), Seq(Row("0.1", "a.b", 1, 1.2)))
+    sql("drop database if exists _default cascade")
+  }
+
   override def afterAll {
     sql("drop table if exists OneRowTable")
+    sql("drop database if exists _default cascade")
   }
 }


Mime
View raw message