carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ajan...@apache.org
Subject [carbondata] branch master updated: [CARBONDATA-3897] Fixed external table with location creation and deleting issues
Date Mon, 13 Jul 2020 14:35:35 GMT
This is an automated email from the ASF dual-hosted git repository.

ajantha pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/carbondata.git


The following commit(s) were added to refs/heads/master by this push:
     new 74ccf4a  [CARBONDATA-3897] Fixed external table with location creation and deleting issues
74ccf4a is described below

commit 74ccf4af825188b251907f508c6761f05c5e5506
Author: kunal642 <kunalkapoor642@gmail.com>
AuthorDate: Sun Jul 12 18:00:06 2020 +0530

    [CARBONDATA-3897] Fixed external table with location creation and deleting issues
    
    Why is this PR needed?
    Creating external table without Fs schema fails on HDFS
    Desc formatted on an external table throws StackOverflowException
    
    What changes were proposed in this PR?
    Added default FS Schema to the table path
    Added a check for non-transactional table in refreshIndexInfo method.
    
    Does this PR introduce any user interface change?
    No
    
    Is any new testcase added?
    No
    
    This closes #3836
---
 .../java/org/apache/carbondata/core/datastore/impl/FileFactory.java  | 5 +++--
 .../spark/src/main/scala/org/apache/spark/sql/CarbonSource.scala     | 4 ++--
 .../scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala | 5 ++++-
 .../spark/sql/secondaryindex/hive/CarbonInternalMetastore.scala      | 4 ++++
 .../testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala | 4 ++--
 5 files changed, 15 insertions(+), 7 deletions(-)

diff --git a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
index 2fac335..6fe3242 100644
--- a/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
+++ b/core/src/main/java/org/apache/carbondata/core/datastore/impl/FileFactory.java
@@ -620,8 +620,9 @@ public final class FileFactory {
     }
 
     final String lowerPath = path.toLowerCase(Locale.getDefault());
-    return lowerPath.contains("://") || lowerPath.startsWith(CarbonCommonConstants.HDFSURL_PREFIX)
-        || lowerPath.startsWith(CarbonCommonConstants.VIEWFSURL_PREFIX) || lowerPath
+    return lowerPath.contains("file:/") || lowerPath.contains("://") || lowerPath
+        .startsWith(CarbonCommonConstants.HDFSURL_PREFIX) || lowerPath
+        .startsWith(CarbonCommonConstants.VIEWFSURL_PREFIX) || lowerPath
         .startsWith(CarbonCommonConstants.LOCAL_FILE_PREFIX) || lowerPath
         .startsWith(CarbonCommonConstants.ALLUXIOURL_PREFIX) || lowerPath
         .startsWith(CarbonCommonConstants.S3N_PREFIX) || lowerPath
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSource.scala b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSource.scala
index 4e8a00f..060de42 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSource.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/CarbonSource.scala
@@ -241,13 +241,13 @@ object CarbonSource {
     } else {
       None
     }
-    val tablePath = CarbonEnv.createTablePath(
+    val tablePath = CarbonUtil.checkAndAppendFileSystemURIScheme(CarbonEnv.createTablePath(
       Some(tableInfo.getDatabaseName),
       tableInfo.getFactTable.getTableName,
       tableInfo.getFactTable.getTableId,
       tableLocation,
       table.tableType == CatalogTableType.EXTERNAL,
-      tableInfo.isTransactionalTable)(sparkSession)
+      tableInfo.isTransactionalTable)(sparkSession))
     tableInfo.setTablePath(tablePath)
     CarbonSparkSqlParserUtil.validateTableProperties(tableInfo)
     val schemaEvolutionEntry = new SchemaEvolutionEntry
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala b/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
index 5bfd68a..694a9bc 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/parser/CarbonSparkSqlParserUtil.scala
@@ -383,7 +383,10 @@ object CarbonSparkSqlParserUtil {
     var isTransactionalTable: Boolean = true
     // table must convert database name and table name to lower case
     val identifier = AbsoluteTableIdentifier.from(
-      table.storage.locationUri.map(CatalogUtils.URIToString).getOrElse(""),
+      CarbonUtil.checkAndAppendFileSystemURIScheme(table.storage
+        .locationUri
+        .map(CatalogUtils.URIToString)
+        .getOrElse("")),
       CarbonEnv.getDatabaseName(table.identifier.database)(sparkSession).toLowerCase(),
       table.identifier.table.toLowerCase()
     )
diff --git a/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/hive/CarbonInternalMetastore.scala b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/hive/CarbonInternalMetastore.scala
index db53ba8..854cf3c 100644
--- a/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/hive/CarbonInternalMetastore.scala
+++ b/integration/spark/src/main/scala/org/apache/spark/sql/secondaryindex/hive/CarbonInternalMetastore.scala
@@ -141,6 +141,10 @@ object CarbonInternalMetastore {
     val indexTableExists = CarbonIndexUtil.isIndexTableExists(carbonTable)
     // check if cg and fg index exists
     val indexExists = CarbonIndexUtil.isIndexExists(carbonTable)
+    // In case of non transactional table, no need to change the table schema.
+    if (!carbonTable.isTransactionalTable) {
+      return
+    }
     // tables created without property "indexTableExists", will return null, for those tables enter
     // into below block, gather the actual data from hive and then set this property to true/false
     // then once the property has a value true/false, make decision based on the property value
diff --git a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
index a93c939..ca5bd85 100644
--- a/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
+++ b/integration/spark/src/test/scala/org/apache/carbondata/spark/testsuite/dataload/TestLoadDataWithHiveSyntaxDefaultFormat.scala
@@ -686,7 +686,7 @@ class TestLoadDataWithHiveSyntaxDefaultFormat extends QueryTest with BeforeAndAf
   }
 
   test("test table with specified table path") {
-    val path = "./source1"
+    val path = s"$warehouse/source1"
     sql("drop table if exists table_path_test")
     sql(
      "CREATE table table_path_test (empno string, salary double) STORED AS carbondata " +
@@ -706,7 +706,7 @@ class TestLoadDataWithHiveSyntaxDefaultFormat extends QueryTest with BeforeAndAf
   }
 
   test("test table with specified database and table path") {
-    val path = "./source2"
+    val path = s"$warehouse/source2"
     sql("drop database if exists test cascade")
     sql("create database if not exists test")
     sql("CREATE table test.table_path_test (empno string, salary double) " +


Mime
View raw message