carbondata-commits mailing list archives

From chenliang...@apache.org
Subject [01/20] carbondata git commit: [CARBONDATA-1346] SDV cluster tests
Date Tue, 01 Aug 2017 06:00:08 GMT
Repository: carbondata
Updated Branches:
  refs/heads/master 946e4ce5a -> 146249537


http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
index c328130..4620db0 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/hive/CarbonHiveMetaStore.scala
@@ -22,6 +22,7 @@ import org.apache.spark.sql.{RuntimeConfig, SparkSession}
 import org.apache.spark.sql.catalyst.TableIdentifier
 
 import org.apache.carbondata.core.cache.dictionary.ManageDictionaryAndBTree
+import org.apache.carbondata.core.indexstore.DataMapStoreManager
 import org.apache.carbondata.core.metadata.{AbsoluteTableIdentifier, CarbonMetadata, CarbonTableIdentifier}
 import org.apache.carbondata.core.metadata.converter.ThriftWrapperSchemaConverterImpl
 import org.apache.carbondata.core.metadata.schema.table.CarbonTable
@@ -82,6 +83,7 @@ class CarbonHiveMetaStore(conf: RuntimeConfig) extends CarbonFileMetastore(conf)
     CarbonHiveMetadataUtil.invalidateAndDropTable(dbName, tableName, sparkSession)
     // discard cached table info in cachedDataSourceTables
     sparkSession.sessionState.catalog.refreshTable(tableIdentifier)
+    DataMapStoreManager.getInstance().clearDataMap(identifier, "blocklet")
   }
 
   override def checkSchemasModifiedTimeAndReloadTables(storePath: String) {

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala b/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
index f764882..8aa25a4 100644
--- a/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
+++ b/integration/spark2/src/main/scala/org/apache/spark/sql/test/Spark2TestQueryExecutor.scala
@@ -17,10 +17,13 @@
 
 package org.apache.spark.sql.test
 
-import org.apache.spark.sql.{DataFrame, SparkSession, SQLContext}
+import org.apache.spark.SparkConf
+import org.apache.spark.sql._
+import org.apache.spark.sql.test.TestQueryExecutor.integrationPath
 
 import org.apache.carbondata.common.logging.LogServiceFactory
 import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.impl.FileFactory
 import org.apache.carbondata.core.util.CarbonProperties
 
 /**
@@ -32,27 +35,41 @@ class Spark2TestQueryExecutor extends TestQueryExecutorRegister {
   override def sql(sqlText: String): DataFrame = Spark2TestQueryExecutor.spark.sql(sqlText)
 
   override def sqlContext: SQLContext = Spark2TestQueryExecutor.spark.sqlContext
+
+  override def stop(): Unit = Spark2TestQueryExecutor.spark.stop()
 }
 
 object Spark2TestQueryExecutor {
   private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
   LOGGER.info("use TestQueryExecutorImplV2")
   CarbonProperties.getInstance()
-    .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, TestQueryExecutor.timestampFormat)
     .addProperty(CarbonCommonConstants.STORE_LOCATION_TEMP_PATH,
       System.getProperty("java.io.tmpdir"))
-    .addProperty(CarbonCommonConstants.LOCK_TYPE, CarbonCommonConstants.CARBON_LOCK_TYPE_LOCAL)
     .addProperty(CarbonCommonConstants.CARBON_BAD_RECORDS_ACTION, "FORCE")
 
 
   import org.apache.spark.sql.CarbonSession._
+
+  val conf = new SparkConf()
+  if (!TestQueryExecutor.masterUrl.startsWith("local")) {
+    conf.setJars(TestQueryExecutor.jars).
+      set("spark.driver.memory", "4g").
+      set("spark.executor.memory", "8g").
+      set("spark.executor.cores", "4").
+      set("spark.cores.max", "8")
+    FileFactory.getConfiguration.
+      set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER")
+  }
+  val metastoredb = s"$integrationPath/spark-common-cluster-test/target"
   val spark = SparkSession
-    .builder()
-    .master("local[2]")
+    .builder().config(conf)
+    .master(TestQueryExecutor.masterUrl)
     .appName("Spark2TestQueryExecutor")
     .enableHiveSupport()
     .config("spark.sql.warehouse.dir", TestQueryExecutor.warehouse)
+    .config("spark.sql.crossJoin.enabled", "true")
     .getOrCreateCarbonSession(null, TestQueryExecutor.metastoredb)
+  FileFactory.getConfiguration.
+    set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER")
   spark.sparkContext.setLogLevel("ERROR")
-
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala
----------------------------------------------------------------------
diff --git a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala
index 7f836f9..30485d1 100644
--- a/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala
+++ b/integration/spark2/src/test/scala/org/apache/spark/carbondata/restructure/vectorreader/AddColumnTestCases.scala
@@ -26,6 +26,8 @@ import org.apache.spark.sql.common.util.Spark2QueryTest
 import org.apache.spark.sql.test.TestQueryExecutor
 import org.scalatest.BeforeAndAfterAll
 
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
 import org.apache.carbondata.spark.exception.MalformedCarbonCommandException
 
 class AddColumnTestCases extends Spark2QueryTest with BeforeAndAfterAll {
@@ -44,6 +46,7 @@ class AddColumnTestCases extends Spark2QueryTest with BeforeAndAfterAll {
     sql(s"LOAD DATA LOCAL INPATH '$resourcesPath/restructure/data1.csv' INTO TABLE addcolumntest
" +
         s"OPTIONS('FILEHEADER'='intField,stringField,charField,timestampField,decimalField')")
     sql("CREATE TABLE hivetable STORED AS PARQUET SELECT * FROM addcolumntest")
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyy")
   }
 
   test("test like query on new column") {
@@ -607,5 +610,7 @@ class AddColumnTestCases extends Spark2QueryTest with BeforeAndAfterAll {
     sql("DROP TABLE IF EXISTS alter_sort_columns")
     sql("DROP TABLE IF EXISTS alter_no_dict")
     sqlContext.setConf("carbon.enable.vector.reader", "false")
+    CarbonProperties.getInstance().addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT,
+      CarbonCommonConstants.CARBON_TIMESTAMP_DEFAULT_FORMAT)
   }
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/14624953/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 7af7b64..3efdf96 100644
--- a/pom.xml
+++ b/pom.xml
@@ -114,6 +114,9 @@
     <spark.deps.scope>compile</spark.deps.scope>
     <scala.deps.scope>compile</scala.deps.scope>
     <dev.path>${basedir}/dev</dev.path>
+    <spark.master.url>local[2]</spark.master.url>
+    <hdfs.url>local</hdfs.url>
+    <suite.name>org.apache.carbondata.cluster.sdv.suite.SDVSuites</suite.name>
   </properties>
 
   <repositories>
@@ -135,18 +138,60 @@
         <artifactId>hadoop-common</artifactId>
         <version>${hadoop.version}</version>
         <scope>${hadoop.deps.scope}</scope>
+        <exclusions>
+          <exclusion>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-all</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>javax.servlet</groupId>
+            <artifactId>*</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>javax.servlet.jsp</groupId>
+            <artifactId>*</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-hdfs</artifactId>
         <version>${hadoop.version}</version>
         <scope>${hadoop.deps.scope}</scope>
+        <exclusions>
+          <exclusion>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-all</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>javax.servlet</groupId>
+            <artifactId>*</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>javax.servlet.jsp</groupId>
+            <artifactId>*</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-mapreduce-client-core</artifactId>
         <version>${hadoop.version}</version>
         <scope>${hadoop.deps.scope}</scope>
+        <exclusions>
+          <exclusion>
+            <groupId>io.netty</groupId>
+            <artifactId>netty-all</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>javax.servlet</groupId>
+            <artifactId>*</artifactId>
+          </exclusion>
+          <exclusion>
+            <groupId>javax.servlet.jsp</groupId>
+            <artifactId>*</artifactId>
+          </exclusion>
+        </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.spark</groupId>
@@ -400,6 +445,12 @@
       <id>include-all</id>
     </profile>
     <profile>
+      <id>sdvtest</id>
+      <modules>
+        <module>integration/spark-common-cluster-test</module>
+      </modules>
+    </profile>
+    <profile>
       <id>rat</id>
       <build>
         <plugins>
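
Note: the pom.xml hunk above introduces an sdvtest profile (pulling in integration/spark-common-cluster-test) together with spark.master.url, hdfs.url and suite.name properties. The commit itself does not show how the suite is launched; as a rough sketch only, assuming standard Maven profile activation and command-line property overrides, with placeholder cluster URLs that are not taken from this commit, it could look like:

  mvn test -Psdvtest \
    -Dspark.master.url=spark://<master-host>:7077 \
    -Dhdfs.url=hdfs://<namenode-host>:8020 \
    -Dsuite.name=org.apache.carbondata.cluster.sdv.suite.SDVSuites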

