carbondata-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From jack...@apache.org
Subject [02/24] carbondata git commit: [CARBONDATA-1652] Add examples for Carbon usage when integrating with Spark
Date Wed, 08 Nov 2017 04:15:36 GMT
[CARBONDATA-1652] Add examples for Carbon usage when integrating with Spark

This closes #1442


Project: http://git-wip-us.apache.org/repos/asf/carbondata/repo
Commit: http://git-wip-us.apache.org/repos/asf/carbondata/commit/93a604dd
Tree: http://git-wip-us.apache.org/repos/asf/carbondata/tree/93a604dd
Diff: http://git-wip-us.apache.org/repos/asf/carbondata/diff/93a604dd

Branch: refs/heads/pre-aggregate
Commit: 93a604dd668b4bb84eacfecb4142be24c17a704f
Parents: 40eaa44
Author: Jacky Li <jacky.likun@qq.com>
Authored: Sat Oct 28 00:09:01 2017 +0530
Committer: chenliang613 <chenliang613@apache.org>
Committed: Sat Oct 28 19:46:28 2017 +0530

----------------------------------------------------------------------
 .../spark2/src/main/resources/complexdata.csv   | 101 +++++++++++++++++
 .../spark2/src/main/resources/dataSample.csv    |  11 ++
 .../spark2/src/main/resources/dimSample.csv     |  21 ++++
 .../spark2/src/main/resources/factSample.csv    |  51 +++++++++
 .../examples/AllDictionaryExample.scala         |  73 +++++++++++++
 .../carbondata/examples/AllDictionaryUtil.scala | 109 +++++++++++++++++++
 .../carbondata/examples/AlluxioExample.scala    |  64 +++++++++++
 .../examples/CarbonSessionExample.scala         |  28 +----
 .../examples/CaseClassDataFrameAPIExample.scala |  47 ++++++++
 .../examples/DataFrameAPIExample.scala          |  49 +++++++++
 .../carbondata/examples/ExampleUtils.scala      | 107 ++++++++++++++++++
 .../carbondata/examples/HadoopFileExample.scala |  52 +++++++++
 12 files changed, 688 insertions(+), 25 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/carbondata/blob/93a604dd/examples/spark2/src/main/resources/complexdata.csv
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/resources/complexdata.csv b/examples/spark2/src/main/resources/complexdata.csv
new file mode 100644
index 0000000..23a3949
--- /dev/null
+++ b/examples/spark2/src/main/resources/complexdata.csv
@@ -0,0 +1,101 @@
+deviceInformationId,channelsId,ROMSize,purchasedate,mobile,MAC,locationinfo,proddate,gamePointId,contractNumber
+1,109,4ROM size,29-11-2015,1AA1$2BB1,MAC1$MAC2$MAC3,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,29-11-2015$29-11-2015:29-11-2015,109,2738.562
+10,93,1ROM size,29-11-2015,1AA10$2BB10,MAC4$MAC5$MAC6,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,30-11-2015$30-11-2015:30-11-2015,93,1714.635
+100,2591,2ROM size,29-11-2015,1AA100$2BB100,MAC7$MAC8$MAC9,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan
Province:xiangtan:xiangtan:jianshelu,01-12-2015$01-12-2015:01-12-2015,2591,1271
+1000,2531,2ROM size,29-11-2015,1AA1000$2BB1000,MAC10$$MAC12,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New
Delhi:wuhan:hongshan:hongshan,02-12-2015$02-12-2015:02-12-2015,2531,692
+10000,2408,0ROM size,29-11-2015,1AA10000$2BB10000,MAC13$$MAC15,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong
Province:guangzhou:longhua:mingzhi,03-12-2015$03-12-2015:03-12-2015,2408,2175
+100000,1815,0ROM size,29-11-2015,1AA100000$2BB100000,MAC16$$MAC18,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New
Delhi:wuhan:hongshan:hongshan,04-12-2015$04-12-2015:04-12-2015,1815,136
+1000000,2479,4ROM size,29-11-2015,1AA1000000$2BB1000000,MAC19$$MAC21,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,05-12-2015$05-12-2015:05-12-2015,2479,1600
+100001,1845,7ROM size,29-11-2015,1AA100001$,MAC22$$MAC24,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan
Province:xiangtan:xiangtan:jianshelu,06-12-2015$06-12-2015:06-12-2015,1845,505
+100002,2008,1ROM size,29-11-2015,1AA100002$,MAC25$$MAC27,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan
Province:changsha:yuhua:shazitang,07-12-2015$07-12-2015:07-12-2015,2008,1341
+100003,1121,5ROM size,29-11-2015,1AA100003$,MAC28$$MAC30,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan
Province:zhuzhou:tianyuan:tianyua,08-12-2015$08-12-2015:08-12-2015,1121,2239
+100004,1511,8ROM size,29-11-2015,1AA100004$,MAC31$$MAC33,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,09-12-2015$09-12-2015:09-12-2015,1511,2970
+100005,2759,0ROM size,29-11-2015,1AA100005$,MAC34$$MAC36,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,10-12-2015$10-12-2015:10-12-2015,2759,2593
+100006,2069,7ROM size,29-11-2015,1AA100006$,MAC37$$MAC39,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan
Province:changsha:yuhua:shazitang,11-12-2015$11-12-2015:11-12-2015,2069,2572
+100007,396,7ROM size,29-11-2015,1AA100007$,MAC40$$MAC42,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan
Province:changsha:yuhua:shazitang,12-12-2015$12-12-2015:12-12-2015,396,1991
+100008,104,2ROM size,29-11-2015,1AA100008$,MAC43$$MAC45,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan
Province:changsha:yuhua:shazitang,13-12-2015$13-12-2015:13-12-2015,104,1442
+100009,477,3ROM size,29-11-2015,1AA100009$,MAC46$$MAC48,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,14-12-2015$14-12-2015:14-12-2015,477,1841
+10001,546,8ROM size,29-11-2015,1AA10001$2,MAC49$$MAC51,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan
Province:changsha:yuhua:shazitang,15-12-2015$15-12-2015:15-12-2015,546,298
+100010,2696,3ROM size,29-11-2015,1AA100010$2BB100010,MAC52$$MAC54,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan
Province:zhuzhou:tianyuan:tianyua,16-12-2015$16-12-2015:16-12-2015,2696,79
+100011,466,2ROM size,29-11-2015,1AA100011$2BB100011,MAC55$$MAC57,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong
Province:guangzhou:longhua:mingzhi,17-12-2015$17-12-2015:17-12-2015,466,202
+100012,2644,2ROM size,29-11-2015,1AA100012$2BB100012,MAC58$$MAC60,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan
Province:xiangtan:xiangtan:jianshelu,18-12-2015$18-12-2015:18-12-2015,2644,568
+100013,2167,3ROM size,29-11-2015,1AA100013$2BB100013,MAC61$MAC62,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan
Province:changsha:yuhua:shazitang,19-12-2015$19-12-2015:19-12-2015,2167,355
+100014,1069,7ROM size,29-11-2015,1AA100014$2BB100014,MAC64$MAC65,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan
Province:zhuzhou:tianyuan:tianyua,20-12-2015$20-12-2015:20-12-2015,1069,151
+100015,1447,9ROM size,29-11-2015,1AA100015$2BB100015,MAC67$MAC68,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan
Province:xiangtan:xiangtan:jianshelu,21-12-2015$21-12-2015:21-12-2015,1447,2863
+100016,2963,3ROM size,29-11-2015,1AA100016$2BB100016,MAC70$MAC71,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan
Province:changsha:yuhua:shazitang,22-12-2015$22-12-2015:22-12-2015,2963,1873
+100017,1580,5ROM size,29-11-2015,1AA100017$2BB100017,MAC73$MAC74,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan
Province:xiangtan:xiangtan:jianshelu,23-12-2015$23-12-2015:23-12-2015,1580,2205
+100018,446,2ROM size,29-11-2015,1AA100018$2BB100018,MAC76$MAC77,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,24-12-2015$24-12-2015:24-12-2015,446,441
+100019,2151,7ROM size,29-11-2015,1AA100019$2BB100019,MAC79$MAC80,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan
Province:zhuzhou:tianyuan:tianyua,25-12-2015$25-12-2015:25-12-2015,2151,2194
+10002,2201,1ROM size,29-11-2015,2BB10002,MAC82$MAC83,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New
Delhi:wuhan:hongshan:hongshan,26-12-2015$26-12-2015:26-12-2015,2201,2972
+100020,2574,5ROM size,29-11-2015,$2BB100020,MAC85$MAC86,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong
Province:shenzhen:longgang:matishan,27-12-2015$27-12-2015:27-12-2015,2574,256
+100021,1734,4ROM size,29-11-2015,$2BB100021,MAC88$MAC89,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan
Province:changsha:yuhua:shazitang,28-12-2015$28-12-2015:28-12-2015,1734,1778
+100022,155,3ROM size,29-11-2015,$2BB100022,MAC91$MAC92,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan
Province:zhuzhou:tianyuan:tianyua,29-12-2015$29-12-2015:29-12-2015,155,1999
+100023,1386,8ROM size,29-11-2015,$2BB100023,MAC94$MAC95,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong
Province:guangzhou:longhua:mingzhi,30-12-2015$30-12-2015:30-12-2015,1386,2194
+100024,1017,9ROM size,29-11-2015,$2BB100024,MAC97$MAC98,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan
Province:changsha:yuhua:shazitang,,1017,2483
+100025,47,2ROM size,29-11-2015,$2BB100025,$MAC101$MAC102,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong
Province:guangzhou:longhua:mingzhi,,47,1724
+100026,2930,7ROM size,29-11-2015,$2BB100026,$MAC104$MAC105,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,,2930,1768
+100027,2940,0ROM size,29-11-2015,$2BB100027,$MAC107$MAC108,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan
Province:zhuzhou:tianyuan:tianyua,,2940,2436
+100028,297,5ROM size,29-11-2015,$2BB100028,$MAC110$MAC111,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan
Province:zhuzhou:tianyuan:tianyua,,297,2849
+100029,1695,2ROM size,29-11-2015,$2BB100029,$MAC113$MAC114,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan
Province:xiangtan:xiangtan:jianshelu,,1695,1691
+10003,1326,7ROM size,29-11-2015,2BB10003,$MAC116$MAC117,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan
Province:xiangtan:xiangtan:jianshelu,,1326,2071
+100030,513,7ROM size,29-11-2015,$2BB100030,$MAC119$MAC120,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan
Province:zhuzhou:tianyuan:tianyua,07-01-2016$07-01-2016:,513,1333
+100031,1741,1ROM size,29-11-2015,$2BB100031,$MAC122$MAC123,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,08-01-2016$08-01-2016:,1741,1080
+100032,1198,0ROM size,29-11-2015,$2BB100032,$MAC125$MAC126,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong
Province:shenzhen:longgang:matishan,09-01-2016$09-01-2016:,1198,1053
+100033,273,9ROM size,29-11-2015,$2BB100033,$MAC128$MAC129,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New
Delhi:wuhan:hongshan:hongshan,10-01-2016$10-01-2016:,273,760
+100034,1234,6ROM size,29-11-2015,$2BB100034,$MAC131$MAC132,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong
Province:guangzhou:longhua:mingzhi,11-01-2016$11-01-2016:,1234,2061
+100035,1619,1ROM size,29-11-2015,$2BB100035,$MAC134$MAC135,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan
Province:changsha:yuhua:shazitang,12-01-2016$12-01-2016:,1619,2142
+100036,2415,2ROM size,29-11-2015,$2BB100036,$MAC137$MAC138,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan
Province:changsha:yuhua:shazitang,13-01-2016$13-01-2016:,2415,2224
+100037,2381,2ROM size,29-11-2015,$2BB100037,$MAC140$MAC141,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan
Province:xiangtan:xiangtan:jianshelu,14-01-2016$14-01-2016:,2381,1015
+100038,872,7ROM size,29-11-2015,1AA100038$2BB100038,$MAC143$MAC144,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong
Province:shenzhen:longgang:matishan,15-01-2016$15-01-2016,872,1229
+100039,1835,9ROM size,29-11-2015,1AA100039$2BB100039,$$MAC147,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong
Province:shenzhen:longgang:matishan,16-01-2016$16-01-2016,1835,1750
+10004,2597,1ROM size,29-11-2015,1AA10004$2BB10004,$$MAC150,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong
Province:guangzhou:longhua:mingzhi,17-01-2016$17-01-2016,2597,1717
+100040,1969,9ROM size,29-11-2015,1AA100040$2BB100040,$$MAC153,,18-01-2016$18-01-2016,1969,2078
+100041,2133,8ROM size,29-11-2015,$,$$MAC156,,19-01-2016$19-01-2016,2133,2734
+100042,631,9ROM size,29-11-2015,$,$$MAC159,,20-01-2016$20-01-2016,631,2745
+100043,187,4ROM size,29-11-2015,$,$$MAC162,2:Chinese::guangzhou:longhua:mingzhi$2:India::guangzhou:longhua:mingzhi,21-01-2016$21-01-2016,187,571
+100044,1232,5ROM size,29-11-2015,$,$$MAC165,2::Guangdong Province:guangzhou:longhua:mingzhi$2::Guangdong
Province:guangzhou:longhua:mingzhi,22-01-2016$22-01-2016,1232,1697
+100045,1602,6ROM size,29-11-2015,$,$$MAC168,4:Chinese:Hunan Province::xiangtan:jianshelu$4:India:Hunan
Province::xiangtan:jianshelu,23-01-2016$23-01-2016,1602,2553
+100046,2319,9ROM size,29-11-2015,$,$$MAC171,2:Chinese:Guangdong Province:guangzhou::mingzhi$2:India:Guangdong
Province:guangzhou::mingzhi,24-01-2016$24-01-2016,2319,1077
+100047,839,4ROM size,29-11-2015,$,$$MAC174,5:Chinese:Hunan Province:zhuzhou:tianyuan:$5:India:Hunan
Province:zhuzhou:tianyuan:,25-01-2016$25-01-2016,839,1823
+100048,1184,2ROM size,29-11-2015,$,$$MAC177,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong
Province:guangzhou:longhua:mingzhi,26-01-2016$:,1184,2399
+100049,2705,2ROM size,29-11-2015,$,$$MAC180,2:Chinese:Guangdong Province$2:India:Guangdong
Province,27-01-2016$:,2705,2890
+10005,1185,1ROM size,29-11-2015,,$$MAC183,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan
Province:xiangtan:xiangtan:jianshelu,28-01-2016$:,1185,1608
+100050,2457,9ROM size,29-11-2015,,$$MAC186,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,29-01-2016$:,2457,29
+100051,2320,8ROM size,29-11-2015,,$$MAC189,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong
Province:guangzhou:longhua:mingzhi,30-01-2016$:,2320,1407
+100052,2300,0ROM size,29-11-2015,,$$,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan
Province:zhuzhou:tianyuan:tianyua,31-01-2016$:,2300,845
+100053,1210,4ROM size,29-11-2015,,$$,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New
Delhi:wuhan:hongshan:hongshan,01-02-2016$:,1210,1655
+100054,1689,8ROM size,29-11-2015,,$$,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong
Province:shenzhen:longgang:matishan,02-02-2016$:,1689,1368
+100055,2823,2ROM size,29-11-2015,,$$,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,$03-02-2016:03-02-2016,2823,1728
+100056,68,6ROM size,29-11-2015,,$$,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New
Delhi:wuhan:hongshan:hongshan,$04-02-2016:04-02-2016,68,750
+100057,716,0ROM size,29-11-2015,,$$,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan
Province:zhuzhou:tianyuan:tianyua,$05-02-2016:05-02-2016,716,2288
+100058,864,6ROM size,29-11-2015,,$$,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong
Province:guangzhou:longhua:mingzhi,$06-02-2016:06-02-2016,864,2635
+100059,499,6ROM size,29-11-2015,,$$,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong
Province:shenzhen:longgang:matishan,$07-02-2016:07-02-2016,499,1337
+10006,1429,3ROM size,29-11-2015,,$$,:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$:India:Guangdong
Province:guangzhou:longhua:mingzhi,$08-02-2016:08-02-2016,1429,2478
+100060,2176,2ROM size,29-11-2015,,$$,:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$:India:Hunan
Province:xiangtan:xiangtan:jianshelu,$09-02-2016:09-02-2016,2176,538
+100061,2563,7ROM size,29-11-2015,,,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan
Province:changsha:yuhua:shazitang,$10-02-2016:10-02-2016,2563,1407
+100062,2594,3ROM size,29-11-2015,,,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,$11-02-2016:11-02-2016,2594,2952
+100063,2142,1ROM size,29-11-2015,,,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,$12-02-2016:12-02-2016,2142,1226
+100064,138,0ROM size,29-11-2015,1AA100064$2BB100064,,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan
Province:zhuzhou:tianyuan:tianyua,$13-02-2016:13-02-2016,138,865
+100065,1168,6ROM size,29-11-2015,1AA100065$2BB100065,,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan
Province:xiangtan:xiangtan:jianshelu,$14-02-2016:14-02-2016,1168,901
+100066,2828,5ROM size,29-11-2015,1AA100066$2BB100066,,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan
Province:zhuzhou:tianyuan:tianyua,$:,2828,1864
+100067,1160,0ROM size,29-11-2015,1AA100067$2BB100067,,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New
Delhi:wuhan:hongshan:hongshan,$:,1160,572
+100068,1890,6ROM size,29-11-2015,1AA100068$2BB100068,,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong
Province:guangzhou:longhua:mingzhi,$:,1890,412
+100069,1195,4ROM size,29-11-2015,1AA100069$2BB100069,,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan
Province:xiangtan:xiangtan:jianshelu,$:,1195,1491
+10007,2797,9ROM size,29-11-2015,1AA10007$2BB10007,,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan
Province:xiangtan:xiangtan:jianshelu,$:,2797,1350
+100070,44,5ROM size,29-11-2015,1AA100070$2BB100070,,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong
Province:guangzhou:longhua:mingzhi,$:,44,1567
+100071,1683,6ROM size,29-11-2015,1AA100071$2BB100071,,2:Chinese:Guangdong Province:guangzhou:longhua:mingzhi$2:India:Guangdong
Province:guangzhou:longhua:mingzhi,$:,1683,1973
+100072,1085,1ROM size,29-11-2015,1AA100072$2BB100072,,3:Chinese:Hunan Province:changsha:yuhua:shazitang$3:India:Hunan
Province:changsha:yuhua:shazitang,22-02-2016$22-02-2016:22-02-2016,1085,448
+100073,776,7ROM size,29-11-2015,1AA100073$2BB100073,,5:Chinese:Hunan Province:zhuzhou:tianyuan:tianyua$5:India:Hunan
Province:zhuzhou:tianyuan:tianyua,23-02-2016$23-02-2016:23-02-2016,776,2488
+100074,2074,9ROM size,29-11-2015,1AA100074$2BB100074,MAC262$MAC263$,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New
Delhi:wuhan:hongshan:hongshan,24-02-2016$24-02-2016:24-02-2016,2074,907
+100075,1062,2ROM size,29-11-2015,1AA100075$2BB100075,MAC265$MAC266$,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong
Province:shenzhen:longgang:matishan,25-02-2016$25-02-2016:25-02-2016,1062,2507
+100076,987,7ROM size,29-11-2015,1AA100076$2BB100076,MAC268$MAC269$,6:Chinese:Hubei Province:wuhan:hongshan:hongshan$6:India:New
Delhi:wuhan:hongshan:hongshan,26-02-2016$26-02-2016:26-02-2016,987,732
+100077,2799,9ROM size,29-11-2015,1AA100077$2BB100077,MAC271$MAC272$,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,27-02-2016$27-02-2016:27-02-2016,2799,2077
+100078,2765,1ROM size,29-11-2015,1AA100078$2BB100078,MAC274$MAC275$,7:Chinese:Hubei Province:yichang:yichang:yichang$7:India:New
Delhi:delhi:delhi:delhi,28-02-2016$28-02-2016:28-02-2016,2765,1434
+100079,2164,1ROM size,29-11-2015,1AA100079$2BB100079,MAC277$MAC278$,4:Chinese:Hunan Province:xiangtan:xiangtan:jianshelu$4:India:Hunan
Province:xiangtan:xiangtan:jianshelu,29-02-2016$29-02-2016:29-02-2016,2164,1098
+10008,1624,6ROM size,29-11-2015,1AA10008$2BB10008,MAC280$MAC281$,1:Chinese:Guangdong Province:shenzhen:longgang:matishan$1:India:Guangdong
Province:shenzhen:longgang:matishan,01-03-2016$01-03-2016:01-03-2016,1624,813
+100080,2355,1ROM size,29-11-2015,1AA100080$2BB100080,MAC283$MAC284$MAC285,1:Chinese:Guangdong
Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,02-03-2016$02-03-2016:02-03-2016,2355,954
+100081,1650,6ROM size,29-11-2015,1AA100081$2BB100081,MAC286$MAC287$MAC288,1:Chinese:Guangdong
Province:shenzhen:longgang:matishan$1:India:Guangdong Province:shenzhen:longgang:matishan,03-03-2016$03-03-2016:03-03-2016,1650,613
+100082,2761,3ROM size,29-11-2015,1AA100082$2BB100082,MAC289$MAC290$MAC291,4:Chinese:Hunan
Province:xiangtan:xiangtan:jianshelu$4:India:Hunan Province:xiangtan:xiangtan:jianshelu,04-03-2016$04-03-2016:04-03-2016,2761,2348
+100083,1856,3ROM size,29-11-2015,1AA100083$2BB100083,MAC292$MAC293$MAC294,5:Chinese:Hunan
Province:zhuzhou:tianyuan:tianyua$5:India:Hunan Province:zhuzhou:tianyuan:tianyua,05-03-2016$05-03-2016:05-03-2016,1856,2192
+100084,1841,7ROM size,29-11-2015,1AA100084$2BB100084,MAC295$MAC296$MAC297,2:Chinese:Guangdong
Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,06-03-2016$06-03-2016:06-03-2016,1841,2826
+100085,1841,7ROM size,29-11-2015,1AA100084$2BB100084,MAC295$MAC296$MAC297,2:Chinese:Guangdong
Province:guangzhou:longhua:mingzhi$2:India:Guangdong Province:guangzhou:longhua:mingzhi,06-03-2016$06-03-2016:06-03-2016,1841,2826

http://git-wip-us.apache.org/repos/asf/carbondata/blob/93a604dd/examples/spark2/src/main/resources/dataSample.csv
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/resources/dataSample.csv b/examples/spark2/src/main/resources/dataSample.csv
new file mode 100644
index 0000000..5696978
--- /dev/null
+++ b/examples/spark2/src/main/resources/dataSample.csv
@@ -0,0 +1,11 @@
+ID,date,country,name,phonetype,serialname,salary,floatField
+1,2015/7/23,china,aaa1,phone197,ASD69643,15000,2.34
+2,2015/7/24,china,aaa2,phone756,ASD42892,15001,2.34
+3,2015/7/25,china,aaa3,phone1904,ASD37014,15002,2.34
+4,2015/7/26,china,aaa4,phone2435,ASD66902,15003,2.34
+5,2015/7/27,china,aaa5,phone2441,ASD90633,15004,2.34
+6,2015/7/28,china,aaa6,phone294,ASD59961,15005,3.5
+7,2015/7/29,china,aaa7,phone610,ASD14875,15006,2.34
+8,2015/7/30,china,aaa8,phone1848,ASD57308,15007,2.34
+9,2015/7/18,china,aaa9,phone706,ASD86717,15008,2.34
+10,2015/7/19,usa,aaa10,phone685,ASD30505,15009,2.34
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/93a604dd/examples/spark2/src/main/resources/dimSample.csv
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/resources/dimSample.csv b/examples/spark2/src/main/resources/dimSample.csv
new file mode 100644
index 0000000..0c8f27a
--- /dev/null
+++ b/examples/spark2/src/main/resources/dimSample.csv
@@ -0,0 +1,21 @@
+id,name,city
+1,David,Beijing
+2,Mark,Paris
+3,Bill,NewYork
+4,Sara,Tokyo
+5,John,Beijing
+6,Michel,Chicago
+7,Robert,Houston
+8,Sunny,Boston
+9,Mary,Tokyo
+10,Edward,Paris
+11,James,Washington
+12,Maria,Berlin
+13,Adam,Athens
+14,Peter,Boston
+15,George,Paris
+16,Paul,Shanghai
+17,Lisa,Hangzhou
+18,Angel,Beijing
+19,Emily,Bangalore
+20,Kevin,Singapore
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/93a604dd/examples/spark2/src/main/resources/factSample.csv
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/resources/factSample.csv b/examples/spark2/src/main/resources/factSample.csv
new file mode 100644
index 0000000..9693156
--- /dev/null
+++ b/examples/spark2/src/main/resources/factSample.csv
@@ -0,0 +1,51 @@
+id,name,city,salary
+1,David,Beijing,15000
+1,David,Tokyo,20000
+1,David,Hangzhou,18000
+2,Mark,Paris,12000
+2,Mark,Boston,15000
+2,Mark,Chicago,18000
+3,Bill,NewYork,20000
+3,Bill,Boston,23000
+4,Sara,Tokyo,11000
+4,Sara,Paris,15000
+4,Sara,Chicago,21000
+4,Sara,Hangzhou,17000
+5,John,Beijing,15000
+5,John,Shanghai,16000
+6,Michel,Chicago,11000
+6,Michel,Boston,12000
+6,Michel,Tokyo,11000
+8,Sunny,Boston,14000
+8,Sunny,Beijing,22000
+8,Sunny,Tokyo,20000
+9,Mary,Tokyo,13000
+9,Mary,NewYork,18000
+9,Mary,Paris,16000
+9,Mary,Washington,20000
+9,Mary,Boston,17000
+10,Edward,Paris,20000
+10,Edward,Beijing,12000
+10,Edward,Berlin,15000
+11,James,Washington,16000
+12,Maria,Berlin,15000
+12,Maria,Beijing,16000
+13,Adam,Athens,21000
+13,Adam,Berlin,18000
+13,Adam,Hangzhou,17000
+14,Peter,Boston,20000
+14,Peter,Berlin,21000
+14,Peter,Shanghai,18000
+15,George,Paris,17000
+15,George,Tokyo,12000
+15,George,Beijing,15000
+15,George,Berlin,18000
+16,Paul,Shanghai,22000
+16,Paul,Tokyo,19000
+16,Paul,Paris,24000
+16,Paul,Hangzhou,22000
+18,Angel,Beijing,22000
+18,Angel,NewYork,25000
+18,Angel,Tokyo,22000
+20,Kevin,Singapore,18000
+20,Kevin,Bangalore,16000
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/carbondata/blob/93a604dd/examples/spark2/src/main/scala/org/apache/carbondata/examples/AllDictionaryExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/AllDictionaryExample.scala
b/examples/spark2/src/main/scala/org/apache/carbondata/examples/AllDictionaryExample.scala
new file mode 100644
index 0000000..2f337f4
--- /dev/null
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/AllDictionaryExample.scala
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
+object AllDictionaryExample {
+
+  def main(args: Array[String]) {
+    val spark = ExampleUtils.createCarbonSession("AllDictionaryExample")
+    val testData = ExampleUtils.currentPath + "/src/main/resources/dataSample.csv"
+    val csvHeader = "ID,date,country,name,phonetype,serialname,salary"
+    val dictCol = "|date|country|name|phonetype|serialname|"
+    val allDictFile = ExampleUtils.currentPath + "/src/main/resources/data.dictionary"
+    // extract all dictionary files from source data
+    AllDictionaryUtil.extractDictionary(spark.sparkContext,
+      testData, allDictFile, csvHeader, dictCol)
+    // Specify date format based on raw data
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
+
+    spark.sql("DROP TABLE IF EXISTS t3")
+
+    spark.sql(
+      s"""
+         | CREATE TABLE IF NOT EXISTS t3(
+         | ID Int,
+         | date Date,
+         | country String,
+         | name String,
+         | phonetype String,
+         | serialname String,
+         | salary Int,
+         | floatField float
+         | ) STORED BY 'carbondata'
+       """.stripMargin)
+
+    spark.sql(s"""
+           LOAD DATA LOCAL INPATH '$testData' into table t3
+           options('ALL_DICTIONARY_PATH'='$allDictFile', 'SINGLE_PASS'='true')
+           """)
+
+    spark.sql("""
+           SELECT * FROM t3
+           """).show()
+
+    spark.sql("""
+           SELECT * FROM t3 where floatField=3.5
+           """).show()
+
+    spark.sql("DROP TABLE IF EXISTS t3")
+
+    // clean local dictionary files
+    AllDictionaryUtil.cleanDictionary(allDictFile)
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/93a604dd/examples/spark2/src/main/scala/org/apache/carbondata/examples/AllDictionaryUtil.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/AllDictionaryUtil.scala
b/examples/spark2/src/main/scala/org/apache/carbondata/examples/AllDictionaryUtil.scala
new file mode 100644
index 0000000..50d26aa
--- /dev/null
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/AllDictionaryUtil.scala
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import java.io.DataOutputStream
+
+import scala.collection.mutable.{ArrayBuffer, HashSet}
+
+import org.apache.spark.SparkContext
+
+import org.apache.carbondata.common.logging.LogServiceFactory
+import org.apache.carbondata.core.datastore.impl.FileFactory
+
+object AllDictionaryUtil {
+  private val LOGGER = LogServiceFactory.getLogService(this.getClass.getCanonicalName)
+  def extractDictionary(sc: SparkContext,
+                        srcData: String,
+                        outputPath: String,
+                        fileHeader: String,
+                        dictCol: String): Unit = {
+    val fileHeaderArr = fileHeader.split(",")
+    val isDictCol = new Array[Boolean](fileHeaderArr.length)
+    for (i <- 0 until fileHeaderArr.length) {
+      if (dictCol.contains("|" + fileHeaderArr(i).toLowerCase() + "|")) {
+        isDictCol(i) = true
+      } else {
+        isDictCol(i) = false
+      }
+    }
+    val dictionaryRdd = sc.textFile(srcData).flatMap(x => {
+      val tokens = x.split(",")
+      val result = new ArrayBuffer[(Int, String)]()
+      for (i <- 0 until isDictCol.length) {
+        if (isDictCol(i)) {
+          try {
+            result += ((i, tokens(i)))
+          } catch {
+            case ex: ArrayIndexOutOfBoundsException =>
+              LOGGER.error("Read a bad record: " + x)
+          }
+        }
+      }
+      result
+    }).groupByKey().flatMap(x => {
+      val distinctValues = new HashSet[(Int, String)]()
+      for (value <- x._2) {
+        distinctValues.add(x._1, value)
+      }
+      distinctValues
+    })
+    val dictionaryValues = dictionaryRdd.map(x => x._1 + "," + x._2).collect()
+    saveToFile(dictionaryValues, outputPath)
+  }
+
+  def cleanDictionary(outputPath: String): Unit = {
+    try {
+      val fileType = FileFactory.getFileType(outputPath)
+      val file = FileFactory.getCarbonFile(outputPath, fileType)
+      if (file.exists()) {
+        file.delete()
+      }
+    } catch {
+      case ex: Exception =>
+        LOGGER.error("Clean dictionary catching exception:" + ex)
+    }
+  }
+
+  def saveToFile(contents: Array[String], outputPath: String): Unit = {
+    var writer: DataOutputStream = null
+    try {
+      val fileType = FileFactory.getFileType(outputPath)
+      val file = FileFactory.getCarbonFile(outputPath, fileType)
+      if (!file.exists()) {
+        file.createNewFile()
+      }
+      writer = FileFactory.getDataOutputStream(outputPath, fileType)
+      for (content <- contents) {
+        writer.writeBytes(content + "\n")
+      }
+    } catch {
+      case ex: Exception =>
+        LOGGER.error("Save dictionary to file catching exception:" + ex)
+    } finally {
+      if (writer != null) {
+        try {
+          writer.close()
+        } catch {
+          case ex: Exception =>
+            LOGGER.error("Close output stream catching exception:" + ex)
+        }
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/93a604dd/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
b/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
new file mode 100644
index 0000000..6c183a5
--- /dev/null
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/AlluxioExample.scala
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.datastore.impl.FileFactory
+import org.apache.carbondata.core.util.CarbonProperties
+
+/**
+ * To configure Alluxio before running this example:
+ * 1. Start Alluxio.
+ * 2. Upload the client jar: "/alluxio_path/core/client/target/
+ *    alluxio-core-client-YOUR-VERSION-jar-with-dependencies.jar"
+ * 3. For more details, see: http://www.alluxio.org/docs/master/en/Running-Spark-on-Alluxio.html
+ */
+
+object AlluxioExample {
+  // Loads a CSV from an Alluxio path into a Carbon table and runs a sample aggregation.
+  def main(args: Array[String]) {
+    val spark = ExampleUtils.createCarbonSession("AlluxioExample")
+    // Register the Alluxio filesystem implementation for both Spark's Hadoop
+    // configuration and Carbon's own FileFactory configuration.
+    spark.sparkContext.hadoopConfiguration.set("fs.alluxio.impl", "alluxio.hadoop.FileSystem")
+    FileFactory.getConfiguration.set("fs.alluxio.impl", "alluxio.hadoop.FileSystem")
+
+    // Specify date format based on raw data
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
+
+    spark.sql("DROP TABLE IF EXISTS t3")
+
+    spark.sql("""
+           CREATE TABLE IF NOT EXISTS t3
+           (ID Int, date Date, country String,
+           name String, phonetype String, serialname String, salary Int)
+           STORED BY 'carbondata'
+           """)
+
+    // NOTE(review): assumes an Alluxio master on localhost:19998 serving /data.csv — verify before running
+    spark.sql(s"""
+           LOAD DATA LOCAL INPATH 'alluxio://localhost:19998/data.csv' into table t3
+           """)
+
+    spark.sql("""
+           SELECT country, count(salary) AS amount
+           FROM t3
+           WHERE country IN ('china','france')
+           GROUP BY country
+           """).show()
+
+    // Clean up the example table.
+    spark.sql("DROP TABLE IF EXISTS t3")
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/93a604dd/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
index c0429b5..a42b366 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CarbonSessionExample.scala
@@ -19,34 +19,10 @@ package org.apache.carbondata.examples
 
 import java.io.File
 
-import org.apache.spark.sql.SparkSession
-
-import org.apache.carbondata.core.constants.CarbonCommonConstants
-import org.apache.carbondata.core.util.CarbonProperties
-
 object CarbonSessionExample {
 
   def main(args: Array[String]) {
-    val rootPath = new File(this.getClass.getResource("/").getPath
-                            + "../../../..").getCanonicalPath
-    val storeLocation = s"$rootPath/examples/spark2/target/store"
-    val warehouse = s"$rootPath/examples/spark2/target/warehouse"
-    val metastoredb = s"$rootPath/examples/spark2/target"
-
-    CarbonProperties.getInstance()
-      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd HH:mm:ss")
-      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
-      .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE_LOADING, "true")
-
-    import org.apache.spark.sql.CarbonSession._
-    val spark = SparkSession
-      .builder()
-      .master("local")
-      .appName("CarbonSessionExample")
-      .config("spark.sql.warehouse.dir", warehouse)
-      .config("spark.driver.host", "localhost")
-      .getOrCreateCarbonSession(storeLocation)
-
+    val spark = ExampleUtils.createCarbonSession("CarbonSessionExample")
     spark.sparkContext.setLogLevel("WARN")
 
     spark.sql("DROP TABLE IF EXISTS carbon_table")
@@ -71,6 +47,8 @@ object CarbonSessionExample {
          | TBLPROPERTIES('SORT_COLUMNS'='', 'DICTIONARY_INCLUDE'='dateField, charField')
        """.stripMargin)
 
+    val rootPath = new File(this.getClass.getResource("/").getPath
+                            + "../../../..").getCanonicalPath
     val path = s"$rootPath/examples/spark2/src/main/resources/data.csv"
 
     // scalastyle:off

http://git-wip-us.apache.org/repos/asf/carbondata/blob/93a604dd/examples/spark2/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
new file mode 100644
index 0000000..c926817
--- /dev/null
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/CaseClassDataFrameAPIExample.scala
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import org.apache.spark.rdd.RDD
+import org.apache.spark.sql.{DataFrame, SaveMode}
+
+// Simple record type used to build the example DataFrame from a case-class RDD.
+case class People(name: String, occupation: String, id: Int)
+
+object CaseClassDataFrameAPIExample {
+
+  // Demonstrates writing a case-class-derived DataFrame to a Carbon table and reading it back.
+  def main(args: Array[String]) {
+    val spark = ExampleUtils.createCarbonSession("CaseClassDataFrameAPIExample")
+
+    val people = List(People("sangeeta", "engineer", 1), People("pallavi", "consultant",
2))
+    val peopleRDD: RDD[People] = spark.sparkContext.parallelize(people)
+    import spark.implicits._
+    val peopleDF: DataFrame = peopleRDD.toDF("name", "occupation", "id")
+
+    // writing data to carbon table
+    peopleDF.write
+      .format("carbondata")
+      .option("tableName", "carbon2")
+      .option("compress", "true")
+      .mode(SaveMode.Overwrite)
+      .save()
+
+    spark.sql("SELECT * FROM carbon2").show()
+
+    // Clean up the example table.
+    spark.sql("DROP TABLE IF EXISTS carbon2")
+  }
+}

http://git-wip-us.apache.org/repos/asf/carbondata/blob/93a604dd/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataFrameAPIExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataFrameAPIExample.scala
b/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataFrameAPIExample.scala
new file mode 100644
index 0000000..7a7e74a
--- /dev/null
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/DataFrameAPIExample.scala
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+// scalastyle:off println
+object DataFrameAPIExample {
+
+  // Demonstrates reading a Carbon table through the DataFrame datasource API and SQL,
+  // including re-querying after an append load.
+  def main(args: Array[String]) {
+    val spark = ExampleUtils.createCarbonSession("DataFrameAPIExample")
+    // Write 1000 sample rows (c1: String, c2: String, c3: Int) into table "carbon1".
+    ExampleUtils.writeSampleCarbonFile(spark, "carbon1", 1000)
+
+    import spark.implicits._
+
+    // use datasource api to read
+    val in = spark.read
+      .format("carbondata")
+      .option("tableName", "carbon1")
+      .load()
+    var count = in.where($"c3" > 500).select($"*").count()
+    println(s"count after 1 load: $count")
+
+    // append new data, query answer should be 1000
+    ExampleUtils.appendSampleCarbonFile(spark, "carbon1")
+    count = in.where($"c3" > 500).select($"*").count()
+    println(s"count after 2 load: $count")
+
+    // use SQL to read
+    spark.sql("SELECT c1, count(c3) FROM carbon1 where c3 > 500 group by c1 limit 10").show
+
+    // delete carbondata file
+    ExampleUtils.cleanSampleCarbonFile(spark, "carbon1")
+  }
+}
+// scalastyle:on println

http://git-wip-us.apache.org/repos/asf/carbondata/blob/93a604dd/examples/spark2/src/main/scala/org/apache/carbondata/examples/ExampleUtils.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/ExampleUtils.scala
b/examples/spark2/src/main/scala/org/apache/carbondata/examples/ExampleUtils.scala
new file mode 100644
index 0000000..a48ed6a
--- /dev/null
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/ExampleUtils.scala
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import java.io.File
+
+import org.apache.spark.sql.{SaveMode, SparkSession}
+
+import org.apache.carbondata.core.constants.CarbonCommonConstants
+import org.apache.carbondata.core.util.CarbonProperties
+
+// scalastyle:off println
+
+object ExampleUtils {
+
+  // Root of the example module at runtime, resolved relative to the compiled classes.
+  def currentPath: String = new File(this.getClass.getResource("/").getPath + "../../")
+      .getCanonicalPath
+  // Carbon store location shared by the example programs; tables live under
+  // $storeLocation/<database>/<tableName>.
+  val storeLocation: String = currentPath + "/target/store"
+
+  /**
+   * Creates (or reuses) a local CarbonSession for the example programs, configuring
+   * Carbon's timestamp/date formats and enabling unsafe column page loading.
+   *
+   * @param appName Spark application name shown in the Spark UI
+   * @return a SparkSession backed by a Carbon store under examples/spark2/target/store
+   */
+  def createCarbonSession(appName: String): SparkSession = {
+    val rootPath = new File(this.getClass.getResource("/").getPath
+                            + "../../../..").getCanonicalPath
+    val storeLocation = s"$rootPath/examples/spark2/target/store"
+    val warehouse = s"$rootPath/examples/spark2/target/warehouse"
+
+    CarbonProperties.getInstance()
+      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "yyyy/MM/dd HH:mm:ss")
+      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "yyyy/MM/dd")
+      .addProperty(CarbonCommonConstants.ENABLE_UNSAFE_COLUMN_PAGE_LOADING, "true")
+
+    import org.apache.spark.sql.CarbonSession._
+    val spark = SparkSession
+      .builder()
+      .master("local")
+      // FIX: use the caller-supplied appName; it was previously hardcoded to
+      // "CarbonSessionExample", so every example reported the wrong application name.
+      .appName(appName)
+      .config("spark.sql.warehouse.dir", warehouse)
+      .config("spark.driver.host", "localhost")
+      .getOrCreateCarbonSession(storeLocation)
+    spark.sparkContext.setLogLevel("WARN")
+    spark
+  }
+
+  /**
+   * This func will write a sample CarbonData file containing following schema:
+   * c1: String, c2: String, c3: Double
+   * Any existing table of the same name is dropped first.
+   * Returns table path
+   */
+  def writeSampleCarbonFile(
+      spark: SparkSession, tableName: String, numRows: Int = 1000): String = {
+    spark.sql(s"DROP TABLE IF EXISTS $tableName")
+    writeDataframe(spark, tableName, numRows, SaveMode.Overwrite)
+    s"$storeLocation/default/$tableName"
+  }
+
+  /**
+   * This func will append data to the CarbonData file
+   * Returns table path
+   */
+  def appendSampleCarbonFile(
+      spark: SparkSession, tableName: String, numRows: Int = 1000): String = {
+    writeDataframe(spark, tableName, numRows, SaveMode.Append)
+    s"$storeLocation/default/$tableName"
+  }
+
+  /**
+   * create a new dataframe and write to CarbonData file, based on save mode
+   */
+  private def writeDataframe(
+      spark: SparkSession, tableName: String, numRows: Int, mode: SaveMode): Unit = {
+    // use CarbonContext to write CarbonData files
+    import spark.implicits._
+    val sc = spark.sparkContext
+    // Rows are ("a", "b", 1..numRows) in 2 partitions.
+    val df = sc.parallelize(1 to numRows, 2)
+        .map(x => ("a", "b", x))
+        .toDF("c1", "c2", "c3")
+
+    // save dataframe directly to carbon file without tempCSV
+    df.write
+      .format("carbondata")
+      .option("tableName", tableName)
+      .option("compress", "true")
+      .option("tempCSV", "false")
+      .mode(mode)
+      .save()
+  }
+
+  // Drops the sample table (and thereby its carbondata files).
+  def cleanSampleCarbonFile(spark: SparkSession, tableName: String): Unit = {
+    spark.sql(s"DROP TABLE IF EXISTS $tableName")
+  }
+}
+// scalastyle:on println
+

http://git-wip-us.apache.org/repos/asf/carbondata/blob/93a604dd/examples/spark2/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
----------------------------------------------------------------------
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
b/examples/spark2/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
new file mode 100644
index 0000000..d14469a
--- /dev/null
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/HadoopFileExample.scala
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.carbondata.examples
+
+import org.apache.hadoop.conf.Configuration
+
+import org.apache.carbondata.hadoop.{CarbonInputFormat, CarbonProjection}
+
+// scalastyle:off println
+object HadoopFileExample {
+
+  // Reads a Carbon table directly through the Hadoop InputFormat API with a
+  // two-column projection, bypassing Spark SQL.
+  def main(args: Array[String]): Unit = {
+    val spark = ExampleUtils.createCarbonSession("HadoopFileExample")
+    ExampleUtils.writeSampleCarbonFile(spark, "carbon1")
+
+    // read two columns
+    val projection = new CarbonProjection
+    projection.addColumn("c1")  // column c1
+    projection.addColumn("c3")  // column c3
+    val conf = new Configuration()
+    CarbonInputFormat.setColumnProjection(conf, projection)
+
+    // Each record is (Void, Array[Object]) holding the projected column values.
+    val sc = spark.sparkContext
+    val input = sc.newAPIHadoopFile(s"${ExampleUtils.storeLocation}/default/carbon1",
+      classOf[CarbonInputFormat[Array[Object]]],
+      classOf[Void],
+      classOf[Array[Object]],
+      conf)
+    val result = input.map(x => x._2.toList).collect
+    result.foreach(x => println(x.mkString(", ")))
+
+    // delete carbondata file
+    ExampleUtils.cleanSampleCarbonFile(spark, "carbon1")
+  }
+}
+// scalastyle:on println
+


Mime
View raw message