carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From gvram...@apache.org
Subject [1/2] carbondata git commit: [CARBONDATA-2607][Complex Column Enhancements] Complex Primitive DataType Adaptive Encoding
Date Tue, 10 Jul 2018 14:28:42 GMT
Repository: carbondata
Updated Branches:
  refs/heads/master 75126c6ca -> 438b4421e


http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/integration/spark-common-test/src/test/resources/adap.csv
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/resources/adap.csv b/integration/spark-common-test/src/test/resources/adap.csv
new file mode 100644
index 0000000..de553d3
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/adap.csv
@@ -0,0 +1,3 @@
+1,500$abc$20:30:40
+2,600$abc$20:30:40
+3,600$abc$20:30:40

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/integration/spark-common-test/src/test/resources/adap_double1.csv
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/resources/adap_double1.csv b/integration/spark-common-test/src/test/resources/adap_double1.csv
new file mode 100644
index 0000000..148c73f
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/adap_double1.csv
@@ -0,0 +1,3 @@
+1,1.323$abc$2.2:3.3:4.4
+2,1.323$abc$2.2:3.3:4.4
+3,1.323$abc$2.2:3.3:4.4

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/integration/spark-common-test/src/test/resources/adap_double2.csv
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/resources/adap_double2.csv b/integration/spark-common-test/src/test/resources/adap_double2.csv
new file mode 100644
index 0000000..9c71288
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/adap_double2.csv
@@ -0,0 +1,3 @@
+1,1.323$abc$20.2:30.3:40.4
+2,2.323$abc$20.2:30.3:40.4
+3,4.323$abc$20.2:30.3:40.4

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/integration/spark-common-test/src/test/resources/adap_double3.csv
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/resources/adap_double3.csv b/integration/spark-common-test/src/test/resources/adap_double3.csv
new file mode 100644
index 0000000..c85574c
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/adap_double3.csv
@@ -0,0 +1,3 @@
+1,1.323$abc$20.2:30.3:500.423
+2,2.323$abc$20.2:30.3:500.423
+3,50.323$abc$20.2:30.3:500.423

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/integration/spark-common-test/src/test/resources/adap_double4.csv
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/resources/adap_double4.csv b/integration/spark-common-test/src/test/resources/adap_double4.csv
new file mode 100644
index 0000000..a1e822b
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/adap_double4.csv
@@ -0,0 +1,3 @@
+1,1.323$abc$20.2:30.3:50000.423
+2,2.323$abc$20.2:30.3:50000.423
+3,50000.323$abc$20.2:30.3:50000.423

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/integration/spark-common-test/src/test/resources/adap_int1.csv
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/resources/adap_int1.csv b/integration/spark-common-test/src/test/resources/adap_int1.csv
new file mode 100644
index 0000000..5db704e
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/adap_int1.csv
@@ -0,0 +1,3 @@
+1,500$abc$200:300:400
+2,700$abc$200:300:400
+3,800$abc$200:300:400

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/integration/spark-common-test/src/test/resources/adap_int2.csv
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/resources/adap_int2.csv b/integration/spark-common-test/src/test/resources/adap_int2.csv
new file mode 100644
index 0000000..b67b8cc
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/adap_int2.csv
@@ -0,0 +1,3 @@
+1,50000$abc$2000000:3000000:4000000
+2,70000$abc$2000000:3000000:4000000
+3,100000$abc$2000000:3000000:4000000

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/integration/spark-common-test/src/test/resources/adap_int3.csv
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/resources/adap_int3.csv b/integration/spark-common-test/src/test/resources/adap_int3.csv
new file mode 100644
index 0000000..ea0fed6
--- /dev/null
+++ b/integration/spark-common-test/src/test/resources/adap_int3.csv
@@ -0,0 +1,3 @@
+1,500000$abc$200:300:52000000
+2,7000000$abc$200:300:52000000
+3,10000000$abc$200:300:52000000

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala
new file mode 100644
index 0000000..6b0a13f
--- /dev/null
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveComplexType.scala
@@ -0,0 +1,554 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.integration.spark.testsuite.complexType
+
+import java.sql.Timestamp
+
+import scala.collection.mutable
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.test.util.QueryTest
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
+trait TestAdaptiveComplexType extends QueryTest {
+
+  test("test INT with struct and array, Encoding INT-->BYTE") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:int,name:string,marks:array<int>>) " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap.csv' into table adaptive options('delimiter'=','," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500, "abc", mutable.WrappedArray.make(Array(20, 30, 40)))),
+        Row(2, Row(600, "abc", mutable.WrappedArray.make(Array(20, 30, 40)))),
+        Row(3, Row(600, "abc", mutable.WrappedArray.make(Array(20, 30, 40))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:int,name:string,marks:array<int>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'500$abc$20:30:40')")
+    sql("insert into adaptive values(2,'600$abc$20:30:40')")
+    sql("insert into adaptive values(3,'600$abc$20:30:40')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500, "abc", mutable.WrappedArray.make(Array(20, 30, 40)))),
+        Row(2, Row(600, "abc", mutable.WrappedArray.make(Array(20, 30, 40)))),
+        Row(3, Row(600, "abc", mutable.WrappedArray.make(Array(20, 30, 40))))))
+  }
+
+  test("test INT with struct and array, Encoding INT-->SHORT") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:int,name:string,marks:array<int>>) " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap_int1.csv' into table adaptive options('delimiter'=','," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500, "abc", mutable.WrappedArray.make(Array(200, 300, 400)))),
+        Row(2, Row(700, "abc", mutable.WrappedArray.make(Array(200, 300, 400)))),
+        Row(3, Row(800, "abc", mutable.WrappedArray.make(Array(200, 300, 400))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:int,name:string,marks:array<int>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'500$abc$200:300:400')")
+    sql("insert into adaptive values(2,'700$abc$200:300:400')")
+    sql("insert into adaptive values(3,'800$abc$200:300:400')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500, "abc", mutable.WrappedArray.make(Array(200, 300, 400)))),
+        Row(2, Row(700, "abc", mutable.WrappedArray.make(Array(200, 300, 400)))),
+        Row(3, Row(800, "abc", mutable.WrappedArray.make(Array(200, 300, 400))))))
+  }
+
+  test("test INT with struct and array, Encoding INT-->SHORT INT") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:int,name:string,marks:array<int>>) " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap_int2.csv' into table adaptive options('delimiter'=','," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(50000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000)))),
+        Row(2, Row(70000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000)))),
+        Row(3, Row(100000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:int,name:string,marks:array<int>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'50000$abc$2000000:3000000:4000000')")
+    sql("insert into adaptive values(2,'70000$abc$2000000:3000000:4000000')")
+    sql("insert into adaptive values(3,'100000$abc$2000000:3000000:4000000')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(50000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000)))),
+        Row(2, Row(70000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000)))),
+        Row(3, Row(100000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000))))))
+  }
+
+  test("test INT with struct and array, Encoding INT-->INT") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:int,name:string,marks:array<int>>) " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap_int3.csv' into table adaptive options('delimiter'=','," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000)))),
+        Row(2, Row(7000000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000)))),
+        Row(3, Row(10000000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:int,name:string,marks:array<int>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'500000$abc$200:300:52000000')")
+    sql("insert into adaptive values(2,'700000$abc$200:300:52000000')")
+    sql("insert into adaptive values(3,'10000000$abc$200:300:52000000')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000)))),
+        Row(2, Row(700000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000)))),
+        Row(3, Row(10000000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000))))))
+  }
+
+
+  test("test SMALLINT with struct and array SMALLINT --> BYTE") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:smallint,name:string," +
+      "marks:array<smallint>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'100$abc$20:30:40')")
+    sql("insert into adaptive values(2,'200$abc$30:40:50')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(100, "abc", mutable.WrappedArray.make(Array(20, 30, 40)))),
+        Row(2, Row(200, "abc", mutable.WrappedArray.make(Array(30, 40, 50))))))
+  }
+
+  test("test SMALLINT with struct and array SMALLINT --> SHORT") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:smallint,name:string," +
+      "marks:array<smallint>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'500$abc$200:300:400')")
+    sql("insert into adaptive values(2,'8000$abc$300:400:500')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500, "abc", mutable.WrappedArray.make(Array(200, 300, 400)))),
+        Row(2, Row(8000, "abc", mutable.WrappedArray.make(Array(300, 400, 500))))))
+  }
+
+  test("test BigInt with struct and array BIGINT --> BYTE") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:bigint,name:string," +
+      "marks:array<bigint>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'1$abc$20:30:40')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(1, "abc", mutable.WrappedArray.make(Array(20, 30, 40))))))
+  }
+
+  test("test BigInt with struct and array BIGINT --> SHORT") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:bigint,name:string," +
+      "marks:array<bigint>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'500$abc$200:300:400')")
+    sql("insert into adaptive values(2,'8000$abc$300:400:500')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500, "abc", mutable.WrappedArray.make(Array(200, 300, 400)))),
+        Row(2, Row(8000, "abc", mutable.WrappedArray.make(Array(300, 400, 500))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:BIGINT,name:string,marks:array<BIGINT>>)" +
+      " " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap_int1.csv' into table adaptive options('delimiter'=','," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500, "abc", mutable.WrappedArray.make(Array(200, 300, 400)))),
+        Row(2, Row(700, "abc", mutable.WrappedArray.make(Array(200, 300, 400)))),
+        Row(3, Row(800, "abc", mutable.WrappedArray.make(Array(200, 300, 400))))))
+  }
+
+  test("test BigInt with struct and array BIGINT --> SHORT INT") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:bigint,name:string," +
+      "marks:array<bigint>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'50000$abc$2000000:3000000:4000000')")
+    sql("insert into adaptive values(2,'70000$abc$2000000:3000000:4000000')")
+    sql("insert into adaptive values(3,'100000$abc$2000000:3000000:4000000')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(50000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000)))),
+        Row(2, Row(70000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000)))),
+        Row(3, Row(100000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:BIGINT,name:string,marks:array<BIGINT>>)" +
+      " " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap_int2.csv' into table adaptive options('delimiter'=','," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(50000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000)))),
+        Row(2, Row(70000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000)))),
+        Row(3, Row(100000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000))))))
+  }
+
+  test("test BIGINT with struct and array, Encoding INT-->INT") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:BIGINT,name:string,marks:array<BIGINT>>)" +
+      " " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap_int3.csv' into table adaptive options('delimiter'=','," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000)))),
+        Row(2, Row(7000000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000)))),
+        Row(3, Row(10000000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:BIGINT,name:string,marks:array<BIGINT>>)" +
+      " " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'500000$abc$200:300:52000000')")
+    sql("insert into adaptive values(2,'700000$abc$200:300:52000000')")
+    sql("insert into adaptive values(3,'10000000$abc$200:300:52000000')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000)))),
+        Row(2, Row(700000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000)))),
+        Row(3, Row(10000000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000))))))
+  }
+
+  test("test Double with Struct and Array DOUBLE --> BYTE") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:double,name:string," +
+      "marks:array<double>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'1.323$abc$2.2:3.3:4.4')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(1.323, "abc", mutable.WrappedArray.make(Array(2.2, 3.3, 4.4))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:double,name:string,marks:array<double>>)" +
+      " " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap_double1.csv' into table adaptive options('delimiter'='," +
+      "'," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(1.323, "abc", mutable.WrappedArray.make(Array(2.2, 3.3, 4.4)))),
+        Row(2, Row(1.323, "abc", mutable.WrappedArray.make(Array(2.2, 3.3, 4.4)))),
+        Row(3, Row(1.323, "abc", mutable.WrappedArray.make(Array(2.2, 3.3, 4.4))))))
+  }
+
+  test("test Double with Struct and Array DOUBLE --> SHORT") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:double,name:string," +
+      "marks:array<double>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'1.323$abc$20.2:30.3:40.4')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(1.323, "abc", mutable.WrappedArray.make(Array(20.2, 30.3, 40.4))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:double,name:string,marks:array<double>>)" +
+      " " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap_double2.csv' into table adaptive options('delimiter'='," +
+      "'," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(1.323, "abc", mutable.WrappedArray.make(Array(20.2, 30.3, 40.4)))),
+        Row(2, Row(2.323, "abc", mutable.WrappedArray.make(Array(20.2, 30.3, 40.4)))),
+        Row(3, Row(4.323, "abc", mutable.WrappedArray.make(Array(20.2, 30.3, 40.4))))))
+  }
+
+  test("test Double with Struct and Array DOUBLE --> SHORT INT") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:double,name:string," +
+      "marks:array<double>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'10.323$abc$20.2:30.3:500.423')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(10.323, "abc", mutable.WrappedArray.make(Array(20.2, 30.3, 500.423))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:double,name:string,marks:array<double>>)" +
+      " " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap_double3.csv' into table adaptive options('delimiter'='," +
+      "'," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(1.323, "abc", mutable.WrappedArray.make(Array(20.2, 30.3, 500.423)))),
+        Row(2, Row(2.323, "abc", mutable.WrappedArray.make(Array(20.2, 30.3, 500.423)))),
+        Row(3, Row(50.323, "abc", mutable.WrappedArray.make(Array(20.2, 30.3, 500.423))))))
+  }
+
+  test("test Double with Struct and Array DOUBLE --> INT") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:double,name:string," +
+      "marks:array<double>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'1000.323$abc$20.2:30.3:50000.423')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(1000.323, "abc", mutable.WrappedArray.make(Array(20.2, 30.3, 50000.423))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:double,name:string,marks:array<double>>)" +
+      " " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap_double4.csv' into table adaptive options('delimiter'='," +
+      "'," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(1.323, "abc", mutable.WrappedArray.make(Array(20.2, 30.3, 50000.423)))),
+        Row(2, Row(2.323, "abc", mutable.WrappedArray.make(Array(20.2, 30.3, 50000.423)))),
+        Row(3, Row(50000.323, "abc", mutable.WrappedArray.make(Array(20.2, 30.3, 50000.423))))))
+  }
+
+  test("test Double with Struct and Array DOUBLE --> DOUBLE") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:double,name:string," +
+      "marks:array<double>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'1.797693134862315$abc$2.2:30.3:1.797693134862315')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1,
+        Row(1.797693134862315,
+          "abc",
+          mutable.WrappedArray.make(Array(2.2, 30.3, 1.797693134862315))))))
+
+  }
+
+  test("test Decimal with Struct") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:decimal(3,2),name:string>) stored by " +
+      "'carbondata'")
+    sql("insert into adaptive values(1,'3.2$abc')")
+    sql("select * from adaptive").show(false)
+  }
+
+  test("test Decimal with Array") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<name:string," +
+      "marks:array<decimal>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'abc$20.2:30.3:40.4')")
+    sql("select * from adaptive").show(false)
+  }
+
+  test("test Timestamp with Struct") {
+    sql("Drop table if exists adaptive")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
+    sql(
+      "create table adaptive(roll int, student struct<id:timestamp,name:string>) stored by " +
+      "'carbondata'")
+    sql("insert into adaptive values(1,'2017/01/01 00:00:00$abc')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(Timestamp.valueOf("2017-01-01 00:00:00.0"), "abc"))))
+  }
+
+  test("test Timestamp with Array") {
+    sql("Drop table if exists adaptive")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
+    sql(
+      "create table adaptive(roll int, student struct<name:string," +
+      "marks:array<timestamp>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'abc$2017/01/01:2018/01/01')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1,
+        Row("abc",
+          mutable.WrappedArray
+            .make(Array(Timestamp.valueOf("2017-01-01 00:00:00.0"),
+              Timestamp.valueOf("2018-01-01 00:00:00.0")))))))
+  }
+
+  test("test DATE with Array") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<name:string," +
+      "marks:array<date>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'abc$2017-01-01')")
+    sql("select * from adaptive").show(false)
+  }
+
+  test("test LONG with Array and Struct Encoding LONG --> BYTE") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:long,name:string,marks:array<long>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'11111$abc$20:30:40')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(11111, "abc", mutable.WrappedArray.make(Array(20, 30, 40))))))
+  }
+
+  test("test LONG with Array and Struct Encoding LONG --> SHORT") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:long,name:string,marks:array<long>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'11111$abc$200:300:400')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(11111, "abc", mutable.WrappedArray.make(Array(200, 300, 400))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:LONG,name:string,marks:array<LONG>>) " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap_int1.csv' into table adaptive options('delimiter'=','," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500, "abc", mutable.WrappedArray.make(Array(200, 300, 400)))),
+        Row(2, Row(700, "abc", mutable.WrappedArray.make(Array(200, 300, 400)))),
+        Row(3, Row(800, "abc", mutable.WrappedArray.make(Array(200, 300, 400))))))
+  }
+
+  test("test LONG with struct and array, Encoding LONG-->SHORT INT") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:LONG,name:string,marks:array<LONG>>) " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap_int2.csv' into table adaptive options('delimiter'=','," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(50000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000)))),
+        Row(2, Row(70000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000)))),
+        Row(3, Row(100000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:LONG,name:string,marks:array<LONG>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'50000$abc$2000000:3000000:4000000')")
+    sql("insert into adaptive values(2,'70000$abc$2000000:3000000:4000000')")
+    sql("insert into adaptive values(3,'100000$abc$2000000:3000000:4000000')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(50000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000)))),
+        Row(2, Row(70000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000)))),
+        Row(3, Row(100000, "abc", mutable.WrappedArray.make(Array(2000000, 3000000, 4000000))))))
+  }
+
+  test("test LONG with struct and array, Encoding LONG-->INT") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:LONG,name:string,marks:array<LONG>>) " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap_int3.csv' into table adaptive options('delimiter'=','," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000)))),
+        Row(2, Row(7000000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000)))),
+        Row(3, Row(10000000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:LONG,name:string,marks:array<LONG>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'500000$abc$200:300:52000000')")
+    sql("insert into adaptive values(2,'700000$abc$200:300:52000000')")
+    sql("insert into adaptive values(3,'10000000$abc$200:300:52000000')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000)))),
+        Row(2, Row(700000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000)))),
+        Row(3, Row(10000000, "abc", mutable.WrappedArray.make(Array(200, 300, 52000000))))))
+  }
+
+  test("test LONG with struct and array, Encoding LONG-->LONG") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:LONG,name:string,marks:array<LONG>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'500000$abc$200:300:52000000000')")
+    sql("insert into adaptive values(2,'700000$abc$200:300:52000000000')")
+    sql("insert into adaptive values(3,'10000000$abc$200:300:52000000000')")
+    sql("select * from adaptive").show(false)
+  }
+
+  test("test SHORT with Array and Struct Encoding SHORT -->BYTE") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:short,name:string,marks:array<short>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'11$abc$20:30:40')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(11, "abc", mutable.WrappedArray.make(Array(20, 30, 40))))))
+  }
+
+  test("test SHORT with Array and Struct Encoding SHORT --> SHORT") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:SHORT,name:string,marks:array<SHORT>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'11111$abc$200:300:400')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(11111, "abc", mutable.WrappedArray.make(Array(200, 300, 400))))))
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:SHORT,name:string,marks:array<SHORT>>) " +
+      "stored by 'carbondata'")
+    sql(
+      s"load data inpath '$resourcesPath/adap_int1.csv' into table adaptive options('delimiter'=','," +
+      "'quotechar'='\"','fileheader'='roll,student','complex_delimiter_level_1'='$'," +
+      "'complex_delimiter_level_2'=':')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(500, "abc", mutable.WrappedArray.make(Array(200, 300, 400)))),
+        Row(2, Row(700, "abc", mutable.WrappedArray.make(Array(200, 300, 400)))),
+        Row(3, Row(800, "abc", mutable.WrappedArray.make(Array(200, 300, 400))))))
+  }
+
+  test("test Boolean with Struct and Array") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:boolean,name:string," +
+      "marks:array<boolean>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'true$abc$false:true:false')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(true, "abc", mutable.WrappedArray.make(Array(false, true, false))))))
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingForNullValues.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingForNullValues.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingForNullValues.scala
new file mode 100644
index 0000000..528fb69
--- /dev/null
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingForNullValues.scala
@@ -0,0 +1,168 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.spark.testsuite.dataload
+
+import scala.collection.mutable
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.test.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
+/**
+ * Test class for adaptive encoding of the unsafe column page with null values.
+ *
+ */
+
+class TestAdaptiveEncodingForNullValues
+  extends QueryTest with BeforeAndAfterAll {
+
+  override def beforeAll(): Unit = {
+    sql("DROP TABLE IF EXISTS adaptive")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE,
+        "true")
+  }
+
+  override def afterAll(): Unit = {
+    sql("DROP TABLE IF EXISTS adaptive")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE,
+        "true")
+  }
+
+  test("test INT with struct and array, Encoding INT-->BYTE") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:int,name:string,marks:array<int>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'null$abc$null:null:null')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(null, "abc", mutable.WrappedArray.make(Array(null, null, null))))))
+  }
+
+
+  test("test SMALLINT with struct and array SMALLINT --> BYTE") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:smallint,name:string," +
+      "marks:array<smallint>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'null$abc$null:null:null')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(null, "abc", mutable.WrappedArray.make(Array(null, null, null))))))
+  }
+
+
+  test("test BigInt with struct and array BIGINT --> BYTE") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:bigint,name:string," +
+      "marks:array<bigint>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'null$abc$null:null:null')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(null, "abc", mutable.WrappedArray.make(Array(null, null, null))))))
+  }
+
+  test("test Double with Struct and Array DOUBLE --> BYTE") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:double,name:string," +
+      "marks:array<double>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'null$abc$null:null:null')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(null, "abc", mutable.WrappedArray.make(Array(null, null, null))))))
+  }
+
+  test("test Decimal with Struct") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:decimal(3,2),name:string," +
+      "marks:array<decimal>>) stored by " +
+      "'carbondata'")
+    sql("insert into adaptive values(1,'null$abc$null:null:null')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(null, "abc", mutable.WrappedArray.make(Array(null, null, null))))))
+  }
+
+  test("test Timestamp with Struct") {
+    sql("Drop table if exists adaptive")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
+    sql(
+      "create table adaptive(roll int, student struct<id:timestamp,name:string>) stored by " +
+      "'carbondata'")
+    sql("insert into adaptive values(1,'null$abc')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(null, "abc"))))
+  }
+
+  test("test Timestamp with Array") {
+    sql("Drop table if exists adaptive")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd")
+    sql(
+      "create table adaptive(roll int, student struct<name:string," +
+      "marks:array<timestamp>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'abc$null:null:null')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row("abc", mutable.WrappedArray.make(Array(null, null, null))))))
+  }
+
+  test("test DATE with Array") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<name:string," +
+      "marks:array<date>>) stored by 'carbondata'")
+    sql("insert into adaptive values(1,'abc$null:null:null')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row("abc", mutable.WrappedArray.make(Array(null, null, null))))))
+  }
+
+  test("test LONG with Array and Struct") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:long,name:string,marks:array<long>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'null$abc$null:null:null')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(null, "abc", mutable.WrappedArray.make(Array(null, null, null))))))
+  }
+
+  test("test SHORT with Array and Struct") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:short,name:string,marks:array<short>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'null$abc$null:null:null')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(null, "abc", mutable.WrappedArray.make(Array(null, null, null))))))
+  }
+
+  test("test Boolean with Struct and Array") {
+    sql("Drop table if exists adaptive")
+    sql(
+      "create table adaptive(roll int, student struct<id:boolean,name:string," +
+      "marks:array<boolean>>) " +
+      "stored by 'carbondata'")
+    sql("insert into adaptive values(1,'null$abc$null:null:null')")
+    checkAnswer(sql("select * from adaptive"),
+      Seq(Row(1, Row(null, "abc", mutable.WrappedArray.make(Array(null, null, null))))))
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingSafeColumnPageForComplexDataType.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingSafeColumnPageForComplexDataType.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingSafeColumnPageForComplexDataType.scala
new file mode 100644
index 0000000..75d08bb
--- /dev/null
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingSafeColumnPageForComplexDataType.scala
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.spark.testsuite.dataload
+
+import java.io.File
+import java.sql.Timestamp
+
+import scala.collection.mutable
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.test.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.integration.spark.testsuite.complexType.TestAdaptiveComplexType
+
+/**
+ * Test class for adaptive encoding of the safe column page with complex data types.
+ *
+ */
+
+class TestAdaptiveEncodingSafeColumnPageForComplexDataType
+  extends QueryTest with BeforeAndAfterAll  with TestAdaptiveComplexType {
+
+  override def beforeAll(): Unit = {
+    sql("DROP TABLE IF EXISTS adaptive")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE,
+        "false")
+  }
+
+  override def afterAll(): Unit = {
+    sql("DROP TABLE IF EXISTS adaptive")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE,
+        "true")
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingUnsafeColumnPageForComplexDataType.scala
----------------------------------------------------------------------
diff --git a/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingUnsafeColumnPageForComplexDataType.scala b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingUnsafeColumnPageForComplexDataType.scala
new file mode 100644
index 0000000..8375195
--- /dev/null
+++ b/integration/spark-common-test/src/test/scala/org/apache/carbondata/integration/spark/testsuite/complexType/TestAdaptiveEncodingUnsafeColumnPageForComplexDataType.scala
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.spark.testsuite.dataload
+
+import java.io.{File, PrintWriter}
+import java.sql.Timestamp
+
+import scala.collection.mutable
+import scala.util.Random
+
+import org.apache.spark.sql.Row
+import org.apache.spark.sql.test.util.QueryTest
+import org.scalatest.BeforeAndAfterAll
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+import org.apache.carbondata.integration.spark.testsuite.complexType.TestAdaptiveComplexType
+
+/**
+ * Test class for adaptive encoding of the unsafe column page with complex data types.
+ *
+ */
+
+class TestAdaptiveEncodingUnsafeColumnPageForComplexDataType
+  extends QueryTest with BeforeAndAfterAll with TestAdaptiveComplexType {
+
+  override def beforeAll(): Unit = {
+
+    new File(CarbonProperties.getInstance().getSystemFolderLocation).delete()
+    sql("DROP TABLE IF EXISTS adaptive")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE,
+        "true")
+  }
+
+  override def afterAll(): Unit = {
+    sql("DROP TABLE IF EXISTS adaptive")
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE,
+        "true")
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/processing/src/main/java/org/apache/carbondata/processing/datatypes/ArrayDataType.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/datatypes/ArrayDataType.java b/processing/src/main/java/org/apache/carbondata/processing/datatypes/ArrayDataType.java
index da34746..05754eb 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/datatypes/ArrayDataType.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/datatypes/ArrayDataType.java
@@ -24,9 +24,11 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.carbondata.core.datastore.ColumnType;
+import org.apache.carbondata.core.datastore.row.ComplexColumnInfo;
 import org.apache.carbondata.core.devapi.DictionaryGenerationException;
 import org.apache.carbondata.core.keygenerator.KeyGenException;
 import org.apache.carbondata.core.keygenerator.KeyGenerator;
+import org.apache.carbondata.core.util.DataTypeUtil;
 import org.apache.carbondata.processing.loading.complexobjects.ArrayObject;
 import org.apache.carbondata.processing.loading.converter.BadRecordLogHolder;
 
@@ -292,14 +294,10 @@ public class ArrayDataType implements GenericDataType<ArrayObject> {
   }
 
   @Override
-  public void getChildrenType(List<ColumnType> type) {
-    type.add(ColumnType.COMPLEX_ARRAY);
-    children.getChildrenType(type);
+  public void getComplexColumnInfo(List<ComplexColumnInfo> columnInfoList) {
+    columnInfoList.add(
+        new ComplexColumnInfo(ColumnType.COMPLEX_ARRAY, DataTypeUtil.valueOf("array"),
+            name, false));
+    children.getComplexColumnInfo(columnInfoList);
   }
-
-  @Override public void getColumnNames(List<String> columnNameList) {
-    columnNameList.add(name);
-    children.getColumnNames(columnNameList);
-  }
-
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/processing/src/main/java/org/apache/carbondata/processing/datatypes/GenericDataType.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/datatypes/GenericDataType.java b/processing/src/main/java/org/apache/carbondata/processing/datatypes/GenericDataType.java
index 049bf57..68315d3 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/datatypes/GenericDataType.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/datatypes/GenericDataType.java
@@ -23,7 +23,7 @@ import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.carbondata.core.datastore.ColumnType;
+import org.apache.carbondata.core.datastore.row.ComplexColumnInfo;
 import org.apache.carbondata.core.devapi.DictionaryGenerationException;
 import org.apache.carbondata.core.keygenerator.KeyGenException;
 import org.apache.carbondata.core.keygenerator.KeyGenerator;
@@ -157,8 +157,5 @@ public interface GenericDataType<T> {
    */
   GenericDataType<T> deepCopy();
 
-  void getChildrenType(List<ColumnType> type);
-
-  void getColumnNames(List<String> columnNameList);
-
+  void getComplexColumnInfo(List<ComplexColumnInfo> columnInfoList);
 }

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java b/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
index 5d22e55..c738bac 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/datatypes/PrimitiveDataType.java
@@ -34,6 +34,7 @@ import org.apache.carbondata.core.cache.dictionary.Dictionary;
 import org.apache.carbondata.core.cache.dictionary.DictionaryColumnUniqueIdentifier;
 import org.apache.carbondata.core.constants.CarbonCommonConstants;
 import org.apache.carbondata.core.datastore.ColumnType;
+import org.apache.carbondata.core.datastore.row.ComplexColumnInfo;
 import org.apache.carbondata.core.devapi.BiDictionary;
 import org.apache.carbondata.core.devapi.DictionaryGenerationException;
 import org.apache.carbondata.core.dictionary.client.DictionaryClient;
@@ -43,6 +44,7 @@ import org.apache.carbondata.core.keygenerator.KeyGenException;
 import org.apache.carbondata.core.keygenerator.KeyGenerator;
 import org.apache.carbondata.core.keygenerator.directdictionary.DirectDictionaryKeyGeneratorFactory;
 import org.apache.carbondata.core.metadata.AbsoluteTableIdentifier;
+import org.apache.carbondata.core.metadata.datatype.DataType;
 import org.apache.carbondata.core.metadata.datatype.DataTypes;
 import org.apache.carbondata.core.metadata.encoder.Encoding;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonColumn;
@@ -112,6 +114,8 @@ public class PrimitiveDataType implements GenericDataType<Object> {
 
   private boolean isDirectDictionary;
 
+  private DataType dataType;
+
   private PrimitiveDataType(int outputArrayIndex, int dataCounter) {
     this.outputArrayIndex = outputArrayIndex;
     this.dataCounter = dataCounter;
@@ -121,25 +125,25 @@ public class PrimitiveDataType implements GenericDataType<Object> {
    * constructor
    *
    * @param name
-   * @param parentname
+   * @param parentName
    * @param columnId
-   * @param dimensionOrdinal
    * @param isDictionary
    */
-  public PrimitiveDataType(String name, String parentname, String columnId, int dimensionOrdinal,
-      boolean isDictionary, String nullformat, boolean isEmptyBadRecord) {
+  public PrimitiveDataType(String name, DataType dataType, String parentName, String columnId,
+      boolean isDictionary, String nullFormat, boolean isEmptyBadRecord) {
     this.name = name;
-    this.parentname = parentname;
+    this.parentname = parentName;
     this.columnId = columnId;
     this.isDictionary = isDictionary;
-    this.nullformat = nullformat;
+    this.nullformat = nullFormat;
     this.isEmptyBadRecord = isEmptyBadRecord;
+    this.dataType = dataType;
   }
 
   /**
    * Constructor
    * @param carbonColumn
-   * @param parentname
+   * @param parentName
    * @param columnId
    * @param carbonDimension
    * @param absoluteTableIdentifier
@@ -149,17 +153,18 @@ public class PrimitiveDataType implements GenericDataType<Object> {
    * @param nullFormat
    * @param isEmptyBadRecords
    */
-  public PrimitiveDataType(CarbonColumn carbonColumn, String parentname, String columnId,
+  public PrimitiveDataType(CarbonColumn carbonColumn, String parentName, String columnId,
       CarbonDimension carbonDimension, AbsoluteTableIdentifier absoluteTableIdentifier,
       DictionaryClient client, Boolean useOnePass, Map<Object, Integer> localCache,
       String nullFormat, Boolean isEmptyBadRecords) {
     this.name = carbonColumn.getColName();
-    this.parentname = parentname;
+    this.parentname = parentName;
     this.columnId = columnId;
     this.carbonDimension = carbonDimension;
     this.isDictionary = isDictionaryDimension(carbonDimension);
     this.nullformat = nullFormat;
     this.isEmptyBadRecord = isEmptyBadRecords;
+    this.dataType = carbonColumn.getDataType();
 
     DictionaryColumnUniqueIdentifier identifier =
         new DictionaryColumnUniqueIdentifier(absoluteTableIdentifier,
@@ -537,14 +542,15 @@ public class PrimitiveDataType implements GenericDataType<Object> {
     dataType.setKeySize(this.keySize);
     dataType.setSurrogateIndex(this.index);
     dataType.name = this.name;
+    dataType.dataType = this.dataType;
     return dataType;
   }
 
-  public void getChildrenType(List<ColumnType> type) {
-    type.add(ColumnType.COMPLEX_PRIMITIVE);
+  @Override
+  public void getComplexColumnInfo(List<ComplexColumnInfo> columnInfoList) {
+    columnInfoList.add(
+        new ComplexColumnInfo(ColumnType.COMPLEX_PRIMITIVE, dataType,
+            name, !isDictionary));
   }
 
-  @Override public void getColumnNames(List<String> columnNameList) {
-    columnNameList.add(name);
-  }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/processing/src/main/java/org/apache/carbondata/processing/datatypes/StructDataType.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/datatypes/StructDataType.java b/processing/src/main/java/org/apache/carbondata/processing/datatypes/StructDataType.java
index 4d3ba87..29acf95 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/datatypes/StructDataType.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/datatypes/StructDataType.java
@@ -24,9 +24,11 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.carbondata.core.datastore.ColumnType;
+import org.apache.carbondata.core.datastore.row.ComplexColumnInfo;
 import org.apache.carbondata.core.devapi.DictionaryGenerationException;
 import org.apache.carbondata.core.keygenerator.KeyGenException;
 import org.apache.carbondata.core.keygenerator.KeyGenerator;
+import org.apache.carbondata.core.util.DataTypeUtil;
 import org.apache.carbondata.processing.loading.complexobjects.StructObject;
 import org.apache.carbondata.processing.loading.converter.BadRecordLogHolder;
 
@@ -323,17 +325,13 @@ public class StructDataType implements GenericDataType<StructObject> {
     return new StructDataType(childrenClone, this.outputArrayIndex, this.dataCounter, this.name);
   }
 
-  public void getChildrenType(List<ColumnType> type) {
-    type.add(ColumnType.COMPLEX_STRUCT);
-    for (int i = 0; i < children.size(); i++) {
-      children.get(i).getChildrenType(type);
-    }
-  }
-
-  @Override public void getColumnNames(List<String> columnNameList) {
-    columnNameList.add(name);
+  @Override
+  public void getComplexColumnInfo(List<ComplexColumnInfo> columnInfoList) {
+    columnInfoList.add(
+        new ComplexColumnInfo(ColumnType.COMPLEX_STRUCT, DataTypeUtil.valueOf("struct"),
+            name, false));
     for (int i = 0; i < children.size(); i++) {
-      children.get(i).getColumnNames(columnNameList);
+      children.get(i).getComplexColumnInfo(columnInfoList);
     }
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java b/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
index 2e65772..c46b2c2 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/store/TablePage.java
@@ -43,6 +43,7 @@ import org.apache.carbondata.core.datastore.page.statistics.LVLongStringStatsCol
 import org.apache.carbondata.core.datastore.page.statistics.LVShortStringStatsCollector;
 import org.apache.carbondata.core.datastore.page.statistics.PrimitivePageStatsCollector;
 import org.apache.carbondata.core.datastore.row.CarbonRow;
+import org.apache.carbondata.core.datastore.row.ComplexColumnInfo;
 import org.apache.carbondata.core.datastore.row.WriteStepRowUtil;
 import org.apache.carbondata.core.keygenerator.KeyGenException;
 import org.apache.carbondata.core.localdictionary.generator.LocalDictionaryGenerator;
@@ -234,20 +235,18 @@ public class TablePage {
 
     // initialize the page if first row
     if (rowId == 0) {
-      List<ColumnType> complexColumnType = new ArrayList<>();
-      List<String> columnNames = new ArrayList<>();
-      complexDataType.getChildrenType(complexColumnType);
-      complexDataType.getColumnNames(columnNames);
-      complexDimensionPages[index] = new ComplexColumnPage(complexColumnType);
+      List<ComplexColumnInfo> complexColumnInfoList = new ArrayList<>();
+      complexDataType.getComplexColumnInfo(complexColumnInfoList);
+      complexDimensionPages[index] = new ComplexColumnPage(complexColumnInfoList);
       try {
         complexDimensionPages[index]
-            .initialize(model.getColumnLocalDictGenMap(), columnNames, pageSize);
+            .initialize(model.getColumnLocalDictGenMap(), pageSize);
       } catch (MemoryException e) {
         throw new RuntimeException(e);
       }
     }
 
-    int depthInComplexColumn = complexDimensionPages[index].getDepth();
+    int depthInComplexColumn = complexDimensionPages[index].getComplexColumnIndex();
     // this is the result columnar data which will be added to page,
     // size of this list is the depth of complex column, we will fill it by input data
     List<ArrayList<byte[]>> encodedComplexColumnar = new ArrayList<>(depthInComplexColumn);

http://git-wip-us.apache.org/repos/asf/carbondata/blob/438b4421/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
----------------------------------------------------------------------
diff --git a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
index c1d5d90..10888f6 100644
--- a/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
+++ b/processing/src/main/java/org/apache/carbondata/processing/util/CarbonDataProcessorUtil.java
@@ -44,6 +44,7 @@ import org.apache.carbondata.core.metadata.schema.table.column.CarbonDimension;
 import org.apache.carbondata.core.metadata.schema.table.column.CarbonMeasure;
 import org.apache.carbondata.core.util.CarbonProperties;
 import org.apache.carbondata.core.util.CarbonUtil;
+import org.apache.carbondata.core.util.DataTypeUtil;
 import org.apache.carbondata.core.util.path.CarbonTablePath;
 import org.apache.carbondata.processing.datatypes.ArrayDataType;
 import org.apache.carbondata.processing.datatypes.GenericDataType;
@@ -383,9 +384,10 @@ public final class CarbonDataProcessorUtil {
         } else if (levelInfo[1].toLowerCase().contains(CarbonCommonConstants.STRUCT)) {
           g.addChildren(new StructDataType(levelInfo[0], levelInfo[2], levelInfo[3]));
         } else {
-          g.addChildren(new PrimitiveDataType(levelInfo[0], levelInfo[2], levelInfo[4],
-              Integer.parseInt(levelInfo[5]), levelInfo[3].contains("true"), nullFormat,
-              isEmptyBadRecord));
+          g.addChildren(
+              new PrimitiveDataType(levelInfo[0], DataTypeUtil.valueOf(levelInfo[1]),
+                  levelInfo[2], levelInfo[4], levelInfo[3].contains("true"), nullFormat,
+                  isEmptyBadRecord));
         }
       }
     }


Mime
View raw message