carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From ravipes...@apache.org
Subject carbondata git commit: [CARBONDATA-2103]Make show datamaps configurable in show tables command
Date Fri, 02 Mar 2018 14:46:13 GMT
Repository: carbondata
Updated Branches:
  refs/heads/branch-1.3 b9a6b6865 -> 660190fb5


[CARBONDATA-2103]Make show datamaps configurable in show tables command

Make the show datamaps in show tables configurable:

a new carbon property called carbon.query.show.datamaps is added; by default it is true, so
show tables will list all the tables, including main tables and datamaps.
if we want to filter out datamaps in show tables, configure this property as false

This closes #2015


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/660190fb
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/660190fb
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/660190fb

Branch: refs/heads/branch-1.3
Commit: 660190fb544e338acd131e7cc30de171e7600df6
Parents: b9a6b68
Author: akashrn5 <akashnilugal@gmail.com>
Authored: Wed Feb 28 17:38:50 2018 +0530
Committer: ravipesala <ravi.pesala@gmail.com>
Committed: Fri Mar 2 20:15:23 2018 +0530

----------------------------------------------------------------------
 .../core/constants/CarbonCommonConstants.java   | 17 ++++++++++++++++
 .../preaggregate/TestPreAggCreateCommand.scala  | 21 ++++++++++++++++----
 .../command/table/CarbonShowTablesCommand.scala | 20 ++++++++++++++-----
 .../spark/sql/hive/CarbonSessionState.scala     | 17 ++++++++++++----
 .../spark/sql/hive/CarbonSessionState.scala     | 17 ++++++++++++----
 5 files changed, 75 insertions(+), 17 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/660190fb/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
----------------------------------------------------------------------
diff --git a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
index 6e6482d..fa2b7d8 100644
--- a/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
+++ b/core/src/main/java/org/apache/carbondata/core/constants/CarbonCommonConstants.java
@@ -1598,6 +1598,23 @@ public final class CarbonCommonConstants {
       "carbon.query.validate.directqueryondatamap";
   public static final String VALIDATE_DIRECT_QUERY_ON_DATAMAP_DEFAULTVALUE = "true";
 
+  /**
+   * If the heap memory allocations of the given size is greater or equal than this value,
+   * it should go through the pooling mechanism.
+   * But if set this size to -1, it should not go through the pooling mechanism.
+   * Default value is 1048576(1MB, the same as Spark).
+   * Unit: byte.
+   */
+  @CarbonProperty
+  public static final String CARBON_HEAP_MEMORY_POOLING_THRESHOLD_BYTES =
+      "carbon.heap.memory.pooling.threshold.bytes";
+  public static final String CARBON_HEAP_MEMORY_POOLING_THRESHOLD_BYTES_DEFAULT = "1048576";
+
+  @CarbonProperty
+  public static final String CARBON_SHOW_DATAMAPS = "carbon.query.show.datamaps";
+
+  public static final String CARBON_SHOW_DATAMAPS_DEFAULT = "true";
+
   private CarbonCommonConstants() {
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/660190fb/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
index 8b71a31..5b7c310 100644
--- a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/preaggregate/TestPreAggCreateCommand.scala
@@ -19,16 +19,18 @@ package org.apache.carbondata.integration.spark.testsuite.preaggregate
 
 import scala.collection.JavaConverters._
 
-import org.apache.spark.sql.{AnalysisException, CarbonDatasourceHadoopRelation}
+import org.apache.spark.sql.{AnalysisException, CarbonDatasourceHadoopRelation, Row}
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.execution.datasources.LogicalRelation
 import org.apache.spark.sql.hive.CarbonRelation
 import org.apache.spark.sql.test.util.QueryTest
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.metadata.encoder.Encoding
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable
 import org.apache.carbondata.core.metadata.schema.datamap.DataMapProvider.TIMESERIES
+import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.spark.exception.{MalformedCarbonCommandException, MalformedDataMapCommandException}
 
 class TestPreAggCreateCommand extends QueryTest with BeforeAndAfterAll {
@@ -237,6 +239,7 @@ class TestPreAggCreateCommand extends QueryTest with BeforeAndAfterAll
{
 
   val timeSeries = TIMESERIES.toString
   test("remove agg tables from show table command") {
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_SHOW_DATAMAPS,"false")
     sql("DROP TABLE IF EXISTS tbl_1")
     sql("DROP TABLE IF EXISTS sparktable")
     sql("create table if not exists  tbl_1(imei string,age int,mac string ,prodate timestamp,update
timestamp,gamepoint double,contrid double) stored by 'carbondata' ")
@@ -249,6 +252,7 @@ class TestPreAggCreateCommand extends QueryTest with BeforeAndAfterAll
{
       ".eventTime'='prodate', 'timeseries.hierarchy'='hour=1,day=1,month=1,year=1') as select
prodate," +
       "mac from tbl_1 group by prodate,mac")
     checkExistence(sql("show tables"), false, "tbl_1_preagg_sum","tbl_1_agg2_day","tbl_1_agg2_hour","tbl_1_agg2_month","tbl_1_agg2_year")
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_SHOW_DATAMAPS,CarbonCommonConstants.CARBON_SHOW_DATAMAPS_DEFAULT)
   }
 
   test("test pre agg  create table 21: create with preaggregate and hierarchy") {
@@ -305,12 +309,14 @@ class TestPreAggCreateCommand extends QueryTest with BeforeAndAfterAll
{
   }
 
   test("remove  agg tables from show table command") {
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_SHOW_DATAMAPS,"false")
     sql("DROP TABLE IF EXISTS tbl_1")
     sql("create table if not exists  tbl_1(imei string,age int,mac string ,prodate timestamp,update
timestamp,gamepoint double,contrid double) stored by 'carbondata' ")
     sql("create datamap agg1 on table tbl_1 using 'preaggregate' as select mac, sum(age)
from tbl_1 group by mac")
     sql("create table if not exists  sparktable(imei string,age int,mac string ,prodate timestamp,update
timestamp,gamepoint double,contrid double) ")
     checkExistence(sql("show tables"), false, "tbl_1_agg1")
     checkExistence(sql("show tables"), true, "sparktable","tbl_1")
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_SHOW_DATAMAPS,
CarbonCommonConstants.CARBON_SHOW_DATAMAPS_DEFAULT)
   }
 
 
@@ -380,10 +386,14 @@ class TestPreAggCreateCommand extends QueryTest with BeforeAndAfterAll
{
   }
 
   test("test show tables filterted with datamaps") {
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_SHOW_DATAMAPS,"false")
     sql("create table showTables(name string, age int) stored by 'carbondata'")
-    sql("create datamap preAgg on table showTables using 'preaggregate' as select sum(age)
from showTables")
+    sql(
+      "create datamap preAgg on table showTables using 'preaggregate' as select sum(age)
from showTables")
     sql("show tables").show()
-    assert(!sql("show tables").collect().contains("showTables_preagg"))
+    checkExistence(sql("show tables"), false, "showtables_preagg")
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_SHOW_DATAMAPS,CarbonCommonConstants.CARBON_SHOW_DATAMAPS_DEFAULT)
+    checkExistence(sql("show tables"), true, "showtables_preagg")
   }
 
   test("test create main and preagg table of same name in two database") {
@@ -426,7 +436,10 @@ class TestPreAggCreateCommand extends QueryTest with BeforeAndAfterAll
{
     carbonTable
   }
 
-  override def afterAll { 
+  override def afterAll {
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_SHOW_DATAMAPS,
+        CarbonCommonConstants.CARBON_SHOW_DATAMAPS_DEFAULT)
     sql("drop database if exists otherDB cascade")
     sql("drop table if exists maintable")
     sql("drop table if exists PreAggMain")

http://git-wip-us.apache.org/repos/asf/carbondata/blob/660190fb/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala
b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala
index e3c4e97..534703d 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/execution/command/table/CarbonShowTablesCommand.scala
@@ -17,9 +17,7 @@
 
 package org.apache.spark.sql.execution.command.table
 
-import scala.collection.JavaConverters._
-
-import org.apache.spark.sql.{CarbonEnv, Row, SparkSession}
+import org.apache.spark.sql.{Row, SparkSession}
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
 import org.apache.spark.sql.execution.command.MetadataCommand
@@ -44,13 +42,25 @@ private[sql] case class CarbonShowTablesCommand ( databaseName: Option[String],
     var tables =
       tableIdentifierPattern.map(catalog.listTables(db, _)).getOrElse(catalog.listTables(db))
     val externalCatalog = sparkSession.sharedState.externalCatalog
+    // this method checks whether the table is mainTable or datamap based on property "isVisible"
+    def isMainTable(tableIdent: TableIdentifier) = {
+      var isMainTable = true
+      try {
+        isMainTable = externalCatalog.getTable(db, tableIdent.table).storage.properties
+          .getOrElse("isVisible", true).toString.toBoolean
+      } catch {
+        case ex: Throwable =>
+        // ignore the exception for show tables
+      }
+      isMainTable
+    }
     // tables will be filtered for all the dataMaps to show only main tables
     tables.collect {
-      case tableIdent if externalCatalog.getTable(db, tableIdent.table).storage.properties
-        .getOrElse("isVisible", true).toString.toBoolean =>
+      case tableIdent if isMainTable(tableIdent) =>
         val isTemp = catalog.isTemporaryTable(tableIdent)
         Row(tableIdent.database.getOrElse("default"), tableIdent.table, isTemp)
     }
+
   }
 
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/660190fb/integration/spark2/src/main/spark2.1/org/apache/spark/sql/hive/CarbonSessionState.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/spark2.1/org/apache/spark/sql/hive/CarbonSessionState.scala
b/integration/spark2/src/main/spark2.1/org/apache/spark/sql/hive/CarbonSessionState.scala
index d45020b..1b7f0cb 100644
--- a/integration/spark2/src/main/spark2.1/org/apache/spark/sql/hive/CarbonSessionState.scala
+++ b/integration/spark2/src/main/spark2.1/org/apache/spark/sql/hive/CarbonSessionState.scala
@@ -36,6 +36,7 @@ import org.apache.spark.sql.optimizer.{CarbonIUDRule, CarbonLateDecodeRule,
Carb
 import org.apache.spark.sql.parser.{CarbonHelperSqlAstBuilder, CarbonSpark2SqlParser, CarbonSparkSqlParser}
 import org.apache.spark.sql.{CarbonDatasourceHadoopRelation, CarbonEnv, ExperimentalMethods,
SparkSession, Strategy}
 
+import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datamap.DataMapStoreManager
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier
 import org.apache.carbondata.core.util.CarbonProperties
@@ -332,9 +333,17 @@ class CarbonSqlAstBuilder(conf: SQLConf, parser: CarbonSpark2SqlParser,
sparkSes
     }
   }
 
-  override def visitShowTables(ctx: ShowTablesContext): LogicalPlan = withOrigin(ctx) {
-    CarbonShowTablesCommand(
-      Option(ctx.db).map(_.getText),
-      Option(ctx.pattern).map(string))
+  override def visitShowTables(ctx: ShowTablesContext): LogicalPlan = {
+    withOrigin(ctx) {
+      if (CarbonProperties.getInstance()
+        .getProperty(CarbonCommonConstants.CARBON_SHOW_DATAMAPS,
+          CarbonCommonConstants.CARBON_SHOW_DATAMAPS_DEFAULT).toBoolean) {
+        super.visitShowTables(ctx)
+      } else {
+        CarbonShowTablesCommand(
+          Option(ctx.db).map(_.getText),
+          Option(ctx.pattern).map(string))
+      }
+    }
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/660190fb/integration/spark2/src/main/spark2.2/org/apache/spark/sql/hive/CarbonSessionState.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/spark2.2/org/apache/spark/sql/hive/CarbonSessionState.scala
b/integration/spark2/src/main/spark2.2/org/apache/spark/sql/hive/CarbonSessionState.scala
index b9425d6..a119bda 100644
--- a/integration/spark2/src/main/spark2.2/org/apache/spark/sql/hive/CarbonSessionState.scala
+++ b/integration/spark2/src/main/spark2.2/org/apache/spark/sql/hive/CarbonSessionState.scala
@@ -44,6 +44,7 @@ import org.apache.spark.sql.parser.{CarbonHelperSqlAstBuilder, CarbonSpark2SqlPa
 import org.apache.spark.sql.types.DecimalType
 import org.apache.spark.util.CarbonReflectionUtils
 
+import org.apache.carbondata.core.constants.CarbonCommonConstants
 import org.apache.carbondata.core.datamap.DataMapStoreManager
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier
 import org.apache.carbondata.core.util.CarbonProperties
@@ -391,9 +392,17 @@ class CarbonSqlAstBuilder(conf: SQLConf, parser: CarbonSpark2SqlParser,
sparkSes
     super.visitCreateTable(ctx)
   }
 
-  override def visitShowTables(ctx: ShowTablesContext): LogicalPlan = withOrigin(ctx) {
-    CarbonShowTablesCommand(
-      Option(ctx.db).map(_.getText),
-      Option(ctx.pattern).map(string))
+  override def visitShowTables(ctx: ShowTablesContext): LogicalPlan = {
+    withOrigin(ctx) {
+      if (CarbonProperties.getInstance()
+        .getProperty(CarbonCommonConstants.CARBON_SHOW_DATAMAPS,
+          CarbonCommonConstants.CARBON_SHOW_DATAMAPS_DEFAULT).toBoolean) {
+        super.visitShowTables(ctx)
+      } else {
+        CarbonShowTablesCommand(
+          Option(ctx.db).map(_.getText),
+          Option(ctx.pattern).map(string))
+      }
+    }
   }
 }


Mime
View raw message