DRILL-1133: Default cp and dfs storage plugin instances do not appear in Drill Web UI after installing Drill.
* Modified StoragePluginRegistry to scan for ALL 'bootstrap-storage-plugins.json' files in the classpath instead of only the first one.
* Moved 'hbase' and 'hive' plugin instance definitions to their own modules from 'java-exec'.
* Renamed the test storage plugin instances to 'dfs_test' and 'hive_test' and updated the test cases to reflect this change.
Project: http://git-wip-us.apache.org/repos/asf/incubator-drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-drill/commit/699851b8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-drill/tree/699851b8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-drill/diff/699851b8
Branch: refs/heads/master
Commit: 699851b8d791f61c687fa3aac94340c78beb92da
Parents: 8289f72
Author: Aditya Kishore <aditya@maprtech.com>
Authored: Sat Jul 12 17:23:11 2014 -0700
Committer: Aditya Kishore <aditya@maprtech.com>
Committed: Sat Jul 12 17:44:23 2014 -0700
----------------------------------------------------------------------
.../resources/bootstrap-storage-plugins.json | 12 +++
.../resources/bootstrap-storage-plugins.json | 12 ---
.../resources/bootstrap-storage-plugins.json | 11 +++
.../org/apache/drill/exec/ExecConstants.java | 1 +
.../drill/exec/store/StoragePluginRegistry.java | 46 ++++++-----
.../resources/bootstrap-storage-plugins.json | 30 ++-----
.../org/apache/drill/TestAltSortQueries.java | 8 +-
.../org/apache/drill/TestExampleQueries.java | 8 +-
.../org/apache/drill/TestProjectPushDown.java | 4 +-
.../drill/exec/TestQueriesOnLargeFile.java | 2 +-
.../physical/impl/writer/TestParquetWriter.java | 2 +-
.../exec/physical/impl/writer/TestWriter.java | 14 ++--
.../apache/drill/exec/sql/TestViewSupport.java | 4 +-
.../store/parquet/ParquetRecordReaderTest.java | 2 +-
.../drill/exec/store/text/TestTextColumn.java | 6 +-
.../complex/writer/TestComplexToJson.java | 4 +-
.../resources/bootstrap-storage-plugins.json | 56 +++++--------
.../resources/parquet/parquet_nullable.json | 6 +-
.../parquet/parquet_nullable_varlen.json | 6 +-
.../resources/parquet/parquet_scan_screen.json | 6 +-
.../src/test/resources/queries/tpch/15.sql | 2 +-
.../src/test/resources/store/text/test.json | 2 +-
.../drill/jdbc/test/TestFunctionsQuery.java | 24 +++---
.../apache/drill/jdbc/test/TestHiveStorage.java | 16 ++--
.../drill/jdbc/test/TestJdbcDistQuery.java | 40 ++++-----
.../apache/drill/jdbc/test/TestJdbcQuery.java | 12 +--
.../apache/drill/jdbc/test/TestMetadataDDL.java | 87 ++++++++++----------
.../org/apache/drill/jdbc/test/TestViews.java | 54 ++++++------
.../resources/bootstrap-storage-plugins.json | 62 +++-----------
29 files changed, 245 insertions(+), 294 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/contrib/storage-hbase/src/main/resources/bootstrap-storage-plugins.json
----------------------------------------------------------------------
diff --git a/contrib/storage-hbase/src/main/resources/bootstrap-storage-plugins.json b/contrib/storage-hbase/src/main/resources/bootstrap-storage-plugins.json
new file mode 100644
index 0000000..3e0e8c0
--- /dev/null
+++ b/contrib/storage-hbase/src/main/resources/bootstrap-storage-plugins.json
@@ -0,0 +1,12 @@
+{
+ "storage":{
+ hbase : {
+ type:"hbase",
+ enabled: false,
+ config : {
+ "hbase.zookeeper.quorum" : "localhost",
+ "hbase.zookeeper.property.clientPort" : 2181
+ }
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/contrib/storage-hbase/src/test/resources/bootstrap-storage-plugins.json
----------------------------------------------------------------------
diff --git a/contrib/storage-hbase/src/test/resources/bootstrap-storage-plugins.json b/contrib/storage-hbase/src/test/resources/bootstrap-storage-plugins.json
deleted file mode 100644
index 3e0e8c0..0000000
--- a/contrib/storage-hbase/src/test/resources/bootstrap-storage-plugins.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
- "storage":{
- hbase : {
- type:"hbase",
- enabled: false,
- config : {
- "hbase.zookeeper.quorum" : "localhost",
- "hbase.zookeeper.property.clientPort" : 2181
- }
- }
- }
-}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/contrib/storage-hive/core/src/main/resources/bootstrap-storage-plugins.json
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/resources/bootstrap-storage-plugins.json b/contrib/storage-hive/core/src/main/resources/bootstrap-storage-plugins.json
index ac61ffd..946e2b6 100644
--- a/contrib/storage-hive/core/src/main/resources/bootstrap-storage-plugins.json
+++ b/contrib/storage-hive/core/src/main/resources/bootstrap-storage-plugins.json
@@ -1,4 +1,15 @@
{
"storage":{
+ hive : {
+ type:"hive",
+ enabled: false,
+ config : {
+ "hive.metastore.uris" : "",
+ "javax.jdo.option.ConnectionURL" : "jdbc:derby:;databaseName=../../sample-data/drill_hive_db;create=true",
+ "hive.metastore.warehouse.dir" : "/tmp/drill_hive_wh",
+ "fs.default.name" : "file:///",
+ "hive.metastore.sasl.enabled" : "false"
+ }
+ }
}
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
index c2f459e..15c3b88 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -131,4 +131,5 @@ public interface ExecConstants {
public static final String QUEUE_TIMEOUT_KEY = "exec.queue.timeout_millis";
public static final OptionValidator QUEUE_TIMEOUT = new PositiveLongValidator(QUEUE_TIMEOUT_KEY, Long.MAX_VALUE, 60*1000*5);
+ public static final String BOOTSTRAP_STORAGE_PLUGINS_FILE = "bootstrap-storage-plugins.json";
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
index ca1cfe8..0b773dd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
@@ -142,30 +142,35 @@ public class StoragePluginRegistry implements Iterable<Map.Entry<String, Storage
}
private Map<String, StoragePlugin> createPlugins() throws DrillbitStartupException {
- /*
- * Check if the storage plugins system table has any entries. If not, load the boostrap-storage-plugin file into the system table.
- */
- Map<String, StoragePlugin> activePlugins = new HashMap<String, StoragePlugin>();
-
try {
-
+ /*
+ * Check if the storage plugins system table has any entries. If not, load the bootstrap-storage-plugin file into the system table.
+ */
if (!pluginSystemTable.iterator().hasNext()) {
// bootstrap load the config since no plugins are stored.
- URL url = Resources.class.getClassLoader().getResource("bootstrap-storage-plugins.json");
- logger.info("Bootstrap loading the storage plugin configs from URL {}.", url);
- if (url != null) {
- String pluginsData = Resources.toString(url, Charsets.UTF_8);
- StoragePlugins plugins = context.getConfig().getMapper().readValue(pluginsData, StoragePlugins.class);
-
- for (Map.Entry<String, StoragePluginConfig> config : plugins) {
- pluginSystemTable.put(config.getKey(), config.getValue());
+ logger.info("No storage plugin instances configured in persistent store, loading bootstrap configuration.");
+ Collection<URL> urls = PathScanner.forResource(ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE, false, Resources.class.getClassLoader());
+ if (urls != null && ! urls.isEmpty()) {
+ logger.info("Loading the storage plugin configs from URLs {}.", urls);
+ Map<String, URL> pluginURLMap = Maps.newHashMap();
+ for (URL url :urls) {
+ String pluginsData = Resources.toString(url, Charsets.UTF_8);
+ StoragePlugins plugins = context.getConfig().getMapper().readValue(pluginsData, StoragePlugins.class);
+ for (Map.Entry<String, StoragePluginConfig> config : plugins) {
+ if (!pluginSystemTable.putIfAbsent(config.getKey(), config.getValue())) {
+ logger.warn("Duplicate plugin instance '{}' defined in [{}, {}], ignoring the later one.",
+ config.getKey(), pluginURLMap.get(config.getKey()), url);
+ continue;
+ }
+ pluginURLMap.put(config.getKey(), url);
+ }
}
-
} else {
- throw new IOException("Failure finding bootstrap-storage-plugins.json");
+ throw new IOException("Failure finding " + ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE);
}
}
+ Map<String, StoragePlugin> activePlugins = new HashMap<String, StoragePlugin>();
for (Map.Entry<String, StoragePluginConfig> config : pluginSystemTable) {
try {
if (config.getValue().isEnabled()) {
@@ -177,15 +182,14 @@ public class StoragePluginRegistry implements Iterable<Map.Entry<String, Storage
}
}
+ activePlugins.put(INFORMATION_SCHEMA_PLUGIN, new InfoSchemaStoragePlugin(new InfoSchemaConfig(), context, INFORMATION_SCHEMA_PLUGIN));
+ activePlugins.put(SYS_PLUGIN, new SystemTablePlugin(SystemTablePluginConfig.INSTANCE, context, SYS_PLUGIN));
+
+ return activePlugins;
} catch (IOException e) {
logger.error("Failure setting up storage plugins. Drillbit exiting.", e);
throw new IllegalStateException(e);
}
-
- activePlugins.put(INFORMATION_SCHEMA_PLUGIN, new InfoSchemaStoragePlugin(new InfoSchemaConfig(), context, INFORMATION_SCHEMA_PLUGIN));
- activePlugins.put(SYS_PLUGIN, new SystemTablePlugin(SystemTablePluginConfig.INSTANCE, context, SYS_PLUGIN));
-
- return activePlugins;
}
public void deletePlugin(String name) {
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/main/resources/bootstrap-storage-plugins.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/resources/bootstrap-storage-plugins.json b/exec/java-exec/src/main/resources/bootstrap-storage-plugins.json
index e6623e9..eadb1bc 100644
--- a/exec/java-exec/src/main/resources/bootstrap-storage-plugins.json
+++ b/exec/java-exec/src/main/resources/bootstrap-storage-plugins.json
@@ -41,29 +41,15 @@
cp: {
type: "file",
- connection: "classpath:///"
- },
-
- hive : {
- type:"hive",
- enabled: false,
- config : {
- "hive.metastore.uris" : "",
- "javax.jdo.option.ConnectionURL" : "jdbc:derby:;databaseName=../../sample-data/drill_hive_db;create=true",
- "hive.metastore.warehouse.dir" : "/tmp/drill_hive_wh",
- "fs.default.name" : "file:///",
- "hive.metastore.sasl.enabled" : "false"
- }
- },
-
- hbase : {
- type:"hbase",
- enabled: false,
- config : {
- "hbase.zookeeper.quorum" : "localhost",
- "hbase.zookeeper.property.clientPort" : 2181
+ connection: "classpath:///",
+ formats: {
+ "json" : {
+ type: "json"
+ },
+ "parquet" : {
+ type: "parquet"
+ }
}
}
-
}
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/java/org/apache/drill/TestAltSortQueries.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestAltSortQueries.java b/exec/java-exec/src/test/java/org/apache/drill/TestAltSortQueries.java
index 119bd48..de1b49a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestAltSortQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestAltSortQueries.java
@@ -30,14 +30,14 @@ public class TestAltSortQueries extends BaseTestQuery{
@Test
public void testOrderBy() throws Exception{
test("select R_REGIONKEY " +
- "from dfs.`[WORKING_PATH]/../../sample-data/region.parquet` " +
+ "from dfs_test.`[WORKING_PATH]/../../sample-data/region.parquet` " +
"order by R_REGIONKEY");
}
@Test
public void testOrderBySingleFile() throws Exception{
test("select R_REGIONKEY " +
- "from dfs.`[WORKING_PATH]/../../sample-data/regionsSF/` " +
+ "from dfs_test.`[WORKING_PATH]/../../sample-data/regionsSF/` " +
"order by R_REGIONKEY");
}
@@ -57,9 +57,9 @@ public class TestAltSortQueries extends BaseTestQuery{
" nations.N_NAME,\n" +
" regions.R_NAME\n" +
"FROM\n" +
- " dfs.`[WORKING_PATH]/../../sample-data/nation.parquet` nations\n" +
+ " dfs_test.`[WORKING_PATH]/../../sample-data/nation.parquet` nations\n" +
"JOIN\n" +
- " dfs.`[WORKING_PATH]/../../sample-data/region.parquet` regions\n" +
+ " dfs_test.`[WORKING_PATH]/../../sample-data/region.parquet` regions\n" +
" on nations.N_REGIONKEY = regions.R_REGIONKEY" +
" order by regions.R_NAME, nations.N_NAME " +
" limit 5");
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java b/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
index 250528f..21526ed 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestExampleQueries.java
@@ -98,14 +98,14 @@ public class TestExampleQueries extends BaseTestQuery{
@Test
public void testText() throws Exception {
String root = FileUtils.getResourceAsFile("/store/text/data/regions.csv").toURI().toString();
- String query = String.format("select * from dfs.`%s`", root);
+ String query = String.format("select * from dfs_test.`%s`", root);
test(query);
}
@Test
public void testTextPartitions() throws Exception {
String root = FileUtils.getResourceAsFile("/store/text/data/").toURI().toString();
- String query = String.format("select * from dfs.`%s`", root);
+ String query = String.format("select * from dfs_test.`%s`", root);
test(query);
}
@@ -116,9 +116,9 @@ public class TestExampleQueries extends BaseTestQuery{
" nations.N_NAME,\n" +
" regions.R_NAME\n" +
"FROM\n" +
- " dfs.`[WORKING_PATH]/../../sample-data/nation.parquet` nations\n" +
+ " dfs_test.`[WORKING_PATH]/../../sample-data/nation.parquet` nations\n" +
"JOIN\n" +
- " dfs.`[WORKING_PATH]/../../sample-data/region.parquet` regions\n" +
+ " dfs_test.`[WORKING_PATH]/../../sample-data/region.parquet` regions\n" +
" on nations.N_REGIONKEY = regions.R_REGIONKEY where 1 = 0");
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java b/exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java
index d6c92e0..ec8e92e 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java
@@ -75,9 +75,9 @@ public class TestProjectPushDown extends PlanTestBase {
testPhysicalPlan("SELECT\n" + " nations.N_NAME,\n" + " regions.R_NAME\n"
+ "FROM\n"
- + " dfs.`[WORKING_PATH]/../../sample-data/nation.parquet` nations\n"
+ + " dfs_test.`[WORKING_PATH]/../../sample-data/nation.parquet` nations\n"
+ "JOIN\n"
- + " dfs.`[WORKING_PATH]/../../sample-data/region.parquet` regions\n"
+ + " dfs_test.`[WORKING_PATH]/../../sample-data/region.parquet` regions\n"
+ " on nations.N_REGIONKEY = regions.R_REGIONKEY", expectedColNames1,
expectedColNames2);
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/java/org/apache/drill/exec/TestQueriesOnLargeFile.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/TestQueriesOnLargeFile.java b/exec/java-exec/src/test/java/org/apache/drill/exec/TestQueriesOnLargeFile.java
index 09aa6c4..ec6c251 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/TestQueriesOnLargeFile.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/TestQueriesOnLargeFile.java
@@ -72,7 +72,7 @@ public class TestQueriesOnLargeFile extends BaseTestQuery {
@Test
public void testRead() throws Exception {
List<QueryResultBatch> results = testSqlWithResults(
- String.format("SELECT count(*) FROM dfs.`default`.`%s`", dataFile.getPath()));
+ String.format("SELECT count(*) FROM dfs_test.`default`.`%s`", dataFile.getPath()));
RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
index a5170ed..aa2b66f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
@@ -193,7 +193,7 @@ public class TestParquetWriter extends BaseTestQuery {
fs.delete(path, true);
}
- test("use dfs.tmp");
+ test("use dfs_test.tmp");
String query = String.format("SELECT %s FROM %s", selection, inputTable);
String create = "CREATE TABLE " + outputFile + " AS " + query;
String validateQuery = String.format("SELECT %s FROM " + outputFile, validationSelection);
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
index 65843a6..1dc8192 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
@@ -94,7 +94,7 @@ public class TestWriter extends BaseTestQuery {
@Test
public void simpleCTAS() throws Exception {
- runSQL("Use dfs.tmp");
+ runSQL("Use dfs_test.tmp");
runSQL(ALTER_SESSION);
String testQuery = "CREATE TABLE simplectas AS SELECT * FROM cp.`employee.json`";
@@ -104,7 +104,7 @@ public class TestWriter extends BaseTestQuery {
@Test
public void complex1CTAS() throws Exception {
- runSQL("Use dfs.tmp");
+ runSQL("Use dfs_test.tmp");
runSQL(ALTER_SESSION);
String testQuery = "CREATE TABLE complex1ctas AS SELECT first_name, last_name, position_id FROM cp.`employee.json`";
@@ -113,7 +113,7 @@ public class TestWriter extends BaseTestQuery {
@Test
public void complex2CTAS() throws Exception {
- runSQL("Use dfs.tmp");
+ runSQL("Use dfs_test.tmp");
runSQL(ALTER_SESSION);
String testQuery = "CREATE TABLE complex2ctas AS SELECT CAST(`birth_date` as Timestamp) FROM cp.`employee.json` GROUP BY birth_date";
@@ -123,16 +123,16 @@ public class TestWriter extends BaseTestQuery {
@Test
public void simpleCTASWithSchemaInTableName() throws Exception {
runSQL(ALTER_SESSION);
- String testQuery = "CREATE TABLE dfs.tmp.`/test/simplectas2` AS SELECT * FROM cp.`employee.json`";
+ String testQuery = "CREATE TABLE dfs_test.tmp.`/test/simplectas2` AS SELECT * FROM cp.`employee.json`";
ctasHelper("/tmp/drilltest/test/simplectas2", testQuery, 1155);
}
@Test
public void simpleParquetDecimal() throws Exception {
-// String testQuery = "CREATE TABLE dfs.tmp.`simpleparquetdecimal` AS SELECT full_name FROM cp.`employee.json`";
- String testQuery = "CREATE TABLE dfs.tmp.`simpleparquetdecimal` AS SELECT cast(salary as decimal(30,2)) * -1 as salary FROM cp.`employee.json`";
-// String testQuery = "select * from dfs.tmp.`simpleparquetdecimal`";
+// String testQuery = "CREATE TABLE dfs_test.tmp.`simpleparquetdecimal` AS SELECT full_name FROM cp.`employee.json`";
+ String testQuery = "CREATE TABLE dfs_test.tmp.`simpleparquetdecimal` AS SELECT cast(salary as decimal(30,2)) * -1 as salary FROM cp.`employee.json`";
+// String testQuery = "select * from dfs_test.tmp.`simpleparquetdecimal`";
ctasHelper("/tmp/drilltest/simpleparquetdecimal", testQuery, 1155);
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestViewSupport.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestViewSupport.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestViewSupport.java
index 5c034e5..585e931 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestViewSupport.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestViewSupport.java
@@ -25,12 +25,12 @@ public class TestViewSupport extends BaseTestQuery{
@Test
public void referToSchemaInsideAndOutsideView() throws Exception {
- String use = "use dfs.tmp;";
+ String use = "use dfs_test.tmp;";
String selectInto = "create table monkey as select c_custkey, c_regionkey from cp.`tpch/customer.parquet`";
String createView = "create view myMonkeyView as select c_custkey, c_regionkey from monkey";
String selectInside = "select * from myMonkeyView;";
String use2 = "use cp;";
- String selectOutside = "select * from dfs.tmp.myMonkeyView;";
+ String selectOutside = "select * from dfs_test.tmp.myMonkeyView;";
test(use);
test(selectInto);
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java
index fbe037b..3e679bb 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetRecordReaderTest.java
@@ -354,7 +354,7 @@ public class ParquetRecordReaderTest extends BaseTestQuery{
@Test
public void testReadError_Drill_901() throws Exception {
- // select cast( L_COMMENT as varchar) from dfs.`/tmp/drilltest/employee_parquet`
+ // select cast( L_COMMENT as varchar) from dfs_test.`/tmp/drilltest/employee_parquet`
HashMap<String, FieldInfo> fields = new HashMap<>();
ParquetTestProperties props = new ParquetTestProperties(1, 120350, DEFAULT_BYTES_PER_PAGE, fields);
testParquetFullEngineEventBased(false, false, "/parquet/par_writer_test.json", null,
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/java/org/apache/drill/exec/store/text/TestTextColumn.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/text/TestTextColumn.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/text/TestTextColumn.java
index 395ec02..e16f781 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/text/TestTextColumn.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/text/TestTextColumn.java
@@ -36,13 +36,13 @@ public class TestTextColumn extends BaseTestQuery{
@Test
public void testCsvColumnSelection() throws Exception{
- test("select columns[0] as region_id, columns[1] as country from dfs.`[WORKING_PATH]/src/test/resources/store/text/data/regions.csv`");
+ test("select columns[0] as region_id, columns[1] as country from dfs_test.`[WORKING_PATH]/src/test/resources/store/text/data/regions.csv`");
}
@Test
public void testDefaultDelimiterColumnSelection() throws Exception {
List<QueryResultBatch> batches = testSqlWithResults("SELECT columns[0] as entire_row " +
- "from dfs.`[WORKING_PATH]/src/test/resources/store/text/data/letters.txt`");
+ "from dfs_test.`[WORKING_PATH]/src/test/resources/store/text/data/letters.txt`");
List<List<String>> expectedOutput = Arrays.asList(
Arrays.asList("\"a, b,\",\"c\",\"d,, \\n e\""),
@@ -56,7 +56,7 @@ public class TestTextColumn extends BaseTestQuery{
@Test
public void testCsvColumnSelectionCommasInsideQuotes() throws Exception {
List<QueryResultBatch> batches = testSqlWithResults("SELECT columns[0] as col1, columns[1] as col2, columns[2] as col3," +
- "columns[3] as col4 from dfs.`[WORKING_PATH]/src/test/resources/store/text/data/letters.csv`");
+ "columns[3] as col4 from dfs_test.`[WORKING_PATH]/src/test/resources/store/text/data/letters.csv`");
List<List<String>> expectedOutput = Arrays.asList(
Arrays.asList("\"a, b,\"", "\"c\"", "\"d,, \\n e\"","\"f\\\"g\""),
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexToJson.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexToJson.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexToJson.java
index 0cffc88..7c7ce2d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexToJson.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexToJson.java
@@ -42,7 +42,7 @@ public class TestComplexToJson extends BaseTestQuery {
client = new DrillClient(config, serviceSet.getCoordinator());
client.setSupportComplexTypes(false);
client.connect();
- results = testSqlWithResults("select * from dfs.`[WORKING_PATH]/src/test/resources/store/text/data/regions.csv`");
+ results = testSqlWithResults("select * from dfs_test.`[WORKING_PATH]/src/test/resources/store/text/data/regions.csv`");
loader.load(results.get(0).getHeader().getDef(), results.get(0).getData());
RecordBatchDef def = results.get(0).getHeader().getDef();
// the entire row is returned as a single column
@@ -58,7 +58,7 @@ public class TestComplexToJson extends BaseTestQuery {
client = new DrillClient(config, serviceSet.getCoordinator());
client.setSupportComplexTypes(true);
client.connect();
- results = testSqlWithResults("select * from dfs.`[WORKING_PATH]/src/test/resources/store/text/data/regions.csv`");
+ results = testSqlWithResults("select * from dfs_test.`[WORKING_PATH]/src/test/resources/store/text/data/regions.csv`");
loader.load(results.get(0).getHeader().getDef(), results.get(0).getData());
def = results.get(0).getHeader().getDef();
// the entire row is returned as a single column
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/resources/bootstrap-storage-plugins.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/bootstrap-storage-plugins.json b/exec/java-exec/src/test/resources/bootstrap-storage-plugins.json
index 534b120..35ee717 100644
--- a/exec/java-exec/src/test/resources/bootstrap-storage-plugins.json
+++ b/exec/java-exec/src/test/resources/bootstrap-storage-plugins.json
@@ -1,6 +1,6 @@
{
"storage":{
- dfs: {
+ dfs_test: {
type: "file",
connection: "file:///",
workspaces: {
@@ -15,42 +15,30 @@
}
},
formats: {
- "psv" : {
- type: "text",
- extensions: [ "tbl" ],
- delimiter: "|"
- },
- "csv" : {
- type: "text",
- extensions: [ "csv", "bcp" ],
- delimiter: ","
- },
- "tsv" : {
- type: "text",
- extensions: [ "tsv" ],
- delimiter: "\t"
- },
- "json" : {
- type: "json"
- },
- "parquet" : {
- type: "parquet"
- },
- "txt" : {
- type : "text",
- extensions: [ "txt" ]
- }
- }
- },
- cp: {
- type: "file",
- connection: "classpath:///",
- formats: {
- "json" : {
- type: "json"
+ "psv" : {
+ type: "text",
+ extensions: [ "tbl" ],
+ delimiter: "|"
+ },
+ "csv" : {
+ type: "text",
+ extensions: [ "csv", "bcp" ],
+ delimiter: ","
+ },
+ "tsv" : {
+ type: "text",
+ extensions: [ "tsv" ],
+ delimiter: "\t"
},
"parquet" : {
type: "parquet"
+ },
+ "json" : {
+ type: "json"
+ },
+ "txt" : {
+ type : "text",
+ extensions: [ "txt" ]
}
}
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/resources/parquet/parquet_nullable.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/parquet/parquet_nullable.json b/exec/java-exec/src/test/resources/parquet/parquet_nullable.json
index 3e09f83..56963a8 100644
--- a/exec/java-exec/src/test/resources/parquet/parquet_nullable.json
+++ b/exec/java-exec/src/test/resources/parquet/parquet_nullable.json
@@ -8,10 +8,10 @@
}
},
storage:{
- "dfs" :
+ "dfs_test" :
{
"type":"named",
- "name":"dfs"
+ "name":"dfs_test"
}
},
query:[
@@ -19,7 +19,7 @@
@id:"1",
op:"scan",
memo:"initial_scan",
- storageengine:"dfs",
+ storageengine:"dfs_test",
selection: {
format: {type: "parquet"},
files: [
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/resources/parquet/parquet_nullable_varlen.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/parquet/parquet_nullable_varlen.json b/exec/java-exec/src/test/resources/parquet/parquet_nullable_varlen.json
index 9547f00..ea81b6c 100644
--- a/exec/java-exec/src/test/resources/parquet/parquet_nullable_varlen.json
+++ b/exec/java-exec/src/test/resources/parquet/parquet_nullable_varlen.json
@@ -8,10 +8,10 @@
}
},
storage:{
- "dfs" :
+ "dfs_test" :
{
"type":"named",
- "name":"dfs"
+ "name":"dfs_test"
}
},
query:[
@@ -19,7 +19,7 @@
@id:"1",
op:"scan",
memo:"initial_scan",
- storageengine:"dfs",
+ storageengine:"dfs_test",
selection: {
format: {type: "parquet"},
files: [
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/resources/parquet/parquet_scan_screen.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/parquet/parquet_scan_screen.json b/exec/java-exec/src/test/resources/parquet/parquet_scan_screen.json
index 50281f4..ebb0b24 100644
--- a/exec/java-exec/src/test/resources/parquet/parquet_scan_screen.json
+++ b/exec/java-exec/src/test/resources/parquet/parquet_scan_screen.json
@@ -8,10 +8,10 @@
}
},
storage:{
- "dfs" :
+ "dfs_test" :
{
"type":"named",
- "name":"dfs"
+ "name":"dfs_test"
}
},
query:[
@@ -19,7 +19,7 @@
@id:"1",
op:"scan",
memo:"initial_scan",
- storageengine:"dfs",
+ storageengine:"dfs_test",
selection: {
format: {type: "parquet"},
files: [
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/resources/queries/tpch/15.sql
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/queries/tpch/15.sql b/exec/java-exec/src/test/resources/queries/tpch/15.sql
index dff842d..2f0aa8e 100644
--- a/exec/java-exec/src/test/resources/queries/tpch/15.sql
+++ b/exec/java-exec/src/test/resources/queries/tpch/15.sql
@@ -1,5 +1,5 @@
-- tpch15 using 1395599672 as a seed to the RNG
-use dfs.tmp; -- views can only be created in dfs schema
+use dfs_test.tmp; -- views can only be created in dfs schema
create view revenue0 (supplier_no, total_revenue) as
select
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/java-exec/src/test/resources/store/text/test.json
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/resources/store/text/test.json b/exec/java-exec/src/test/resources/store/text/test.json
index ee33b5d..fbf19a4 100644
--- a/exec/java-exec/src/test/resources/store/text/test.json
+++ b/exec/java-exec/src/test/resources/store/text/test.json
@@ -15,7 +15,7 @@
],
storage : {
type : "named",
- name: "dfs"
+ name: "dfs_test"
},
format: {
type: "named",
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestFunctionsQuery.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestFunctionsQuery.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestFunctionsQuery.java
index 53eef9c..3a8bcbc 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestFunctionsQuery.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestFunctionsQuery.java
@@ -44,7 +44,7 @@ public class TestFunctionsQuery {
"abs(cast('-12345678912345678912.4567' as decimal(28, 5))) DEC28_ABS_2, " +
"abs(cast('1234567891234567891234567891234567891.4' as decimal(38, 1))) DEC38_ABS_1, " +
"abs(cast('-1234567891234567891234567891234567891.4' as decimal(38, 1))) DEC38_ABS_2 " +
- "FROM dfs.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
+ "FROM dfs_test.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
JdbcAssert.withNoDefaultSchema()
.sql(query)
@@ -82,7 +82,7 @@ public class TestFunctionsQuery {
"ceil(cast('1234567891234567891234567891234567891.0' as decimal(38, 1))) DEC38_3, " +
"ceil(cast('-1234567891234567891234567891234567891.4' as decimal(38, 1))) DEC38_4, " +
"ceil(cast('-1234567891234567891234567891234567891.0' as decimal(38, 1))) DEC38_5 " +
- "FROM dfs.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
+ "FROM dfs_test.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
JdbcAssert.withNoDefaultSchema()
.sql(query)
@@ -130,7 +130,7 @@ public class TestFunctionsQuery {
"floor(cast('1234567891234567891234567891234567891.0' as decimal(38, 1))) DEC38_3, " +
"floor(cast('-1234567891234567891234567891234567891.4' as decimal(38, 1))) DEC38_4, " +
"floor(cast('-999999999999999999999999999999999999.4' as decimal(38, 1))) DEC38_5 " +
- "FROM dfs.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
+ "FROM dfs_test.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
JdbcAssert.withNoDefaultSchema()
.sql(query)
@@ -177,7 +177,7 @@ public class TestFunctionsQuery {
"trunc(cast('1234567891234567891234567891234567891.0' as decimal(38, 1))) DEC38_3, " +
"trunc(cast('-1234567891234567891234567891234567891.4' as decimal(38, 1))) DEC38_4, " +
"trunc(cast('-999999999999999999999999999999999999.4' as decimal(38, 1))) DEC38_5 " +
- "FROM dfs.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
+ "FROM dfs_test.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
JdbcAssert.withNoDefaultSchema()
.sql(query)
@@ -224,7 +224,7 @@ public class TestFunctionsQuery {
"trunc(cast('999999999.1234' as decimal(38, 4)), 12) DEC38_3, " +
"trunc(cast('-123456789123456789.4' as decimal(38, 1)), 10) DEC38_4, " +
"trunc(cast('-999999999999999999999999999999999999.4' as decimal(38, 1)), 1) DEC38_5 " +
- "FROM dfs.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
+ "FROM dfs_test.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
JdbcAssert.withNoDefaultSchema()
.sql(query)
@@ -263,7 +263,7 @@ public class TestFunctionsQuery {
"trunc(-1234, -4) as T_8, " +
"trunc(8124674407369523212, 0) as T_9, " +
"trunc(81246744073695.395, 1) as T_10 " +
- "FROM dfs.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
+ "FROM dfs_test.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
JdbcAssert.withNoDefaultSchema()
.sql(query)
@@ -303,7 +303,7 @@ public class TestFunctionsQuery {
"round(cast('1234567891234567891234567891234567891.2' as decimal(38, 1))) DEC38_4, " +
"round(cast('-1234567891234567891234567891234567891.4' as decimal(38, 1))) DEC38_5, " +
"round(cast('-999999999999999999999999999999999999.9' as decimal(38, 1))) DEC38_6 " +
- "FROM dfs.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
+ "FROM dfs_test.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
JdbcAssert.withNoDefaultSchema()
.sql(query)
@@ -354,7 +354,7 @@ public class TestFunctionsQuery {
"round(cast('999999999.9999999995678' as decimal(38, 18)), 21) DEC38_5, " +
"round(cast('-1234567891234567891234567891234567891.4' as decimal(38, 1)), 1) DEC38_6, " +
"round(cast('-999999999999999999999999999999999999.9' as decimal(38, 1)), 0) DEC38_7 " +
- "FROM dfs.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
+ "FROM dfs_test.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
JdbcAssert.withNoDefaultSchema()
.sql(query)
@@ -396,7 +396,7 @@ public class TestFunctionsQuery {
"round(-1234, -4) as T_8, " +
"round(8124674407369523212, -4) as T_9, " +
"round(81246744073695.395, 1) as T_10 " +
- "FROM dfs.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
+ "FROM dfs_test.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
JdbcAssert.withNoDefaultSchema()
.sql(query)
@@ -423,7 +423,7 @@ public class TestFunctionsQuery {
"to_char(cast('99999912399.9567' as decimal(18, 5)), '#.#####') DEC18_1, " +
"to_char(cast('12345678912345678912.5567' as decimal(28, 5)), '#,###.#####') DEC28_1, " +
"to_char(cast('999999999999999999999999999.5' as decimal(38, 1)), '#.#') DEC38_1 " +
- "FROM dfs.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
+ "FROM dfs_test.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
JdbcAssert.withNoDefaultSchema()
.sql(query)
@@ -442,7 +442,7 @@ public class TestFunctionsQuery {
public void testConcatFunction() throws Exception {
String query = String.format("SELECT " +
"concat('1234', ' COL_VALUE ', R_REGIONKEY, ' - STRING') as STR_1 " +
- "FROM dfs.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
+ "FROM dfs_test.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
JdbcAssert.withNoDefaultSchema()
.sql(query)
@@ -456,7 +456,7 @@ public class TestFunctionsQuery {
public void testTimeStampConstant() throws Exception {
String query = String.format("SELECT " +
"timestamp '2008-2-23 12:23:23' as TS " +
- "FROM dfs.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
+ "FROM dfs_test.`%s/../../sample-data/region.parquet` limit 1", WORKING_PATH);
JdbcAssert.withNoDefaultSchema()
.sql(query)
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestHiveStorage.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestHiveStorage.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestHiveStorage.java
index 1f70942..86359e0 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestHiveStorage.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestHiveStorage.java
@@ -30,29 +30,29 @@ public class TestHiveStorage extends JdbcTestQueryBase{
@Test
public void testHiveReadWithDb() throws Exception{
- testQuery("select * from hive.`default`.kv");
- testQuery("select key from hive.`default`.kv group by key");
+ testQuery("select * from hive_test.`default`.kv");
+ testQuery("select key from hive_test.`default`.kv group by key");
}
@Test
public void testHiveWithDate() throws Exception {
- testQuery("select * from hive.`default`.foodate");
- testQuery("select date_add(a, time '12:23:33'), b from hive.`default`.foodate");
+ testQuery("select * from hive_test.`default`.foodate");
+ testQuery("select date_add(a, time '12:23:33'), b from hive_test.`default`.foodate");
}
@Test
public void testQueryEmptyHiveTable() throws Exception {
- testQuery("SELECT * FROM hive.`default`.empty_table");
+ testQuery("SELECT * FROM hive_test.`default`.empty_table");
}
@Test
public void testReadAllSupportedHiveDataTypes() throws Exception {
// There are known issues with displaying VarBinary in JDBC. So for now just execute the query and do not
// verify the results until display issues with VarBinary are resolved.
- testQuery("SELECT * FROM hive.`default`.readtest");
+ testQuery("SELECT * FROM hive_test.`default`.readtest");
/*
- JdbcAssert.withFull("hive.default")
+ JdbcAssert.withFull("hive_test.default")
.sql("SELECT * FROM readtest")
.returns(
"binary_field=[B@7005f08f; " + // know issues with binary display
@@ -86,7 +86,7 @@ public class TestHiveStorage extends JdbcTestQueryBase{
@Test
public void testOrderByOnHiveTable() throws Exception {
- JdbcAssert.withFull("hive.default")
+ JdbcAssert.withFull("hive_test.default")
.sql("SELECT * FROM kv ORDER BY `value` DESC")
.returns(
"key=5; value= key_5\n" +
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcDistQuery.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcDistQuery.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcDistQuery.java
index 93cfce3..39ba043 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcDistQuery.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcDistQuery.java
@@ -56,27 +56,27 @@ public class TestJdbcDistQuery extends JdbcTest{
@Test
public void testSimpleQuerySingleFile() throws Exception{
testQuery(String.format("select R_REGIONKEY, R_NAME "
- + "from dfs.`%s/../../sample-data/regionsSF/`", WORKING_PATH));
+ + "from dfs_test.`%s/../../sample-data/regionsSF/`", WORKING_PATH));
}
@Test
public void testSimpleQueryMultiFile() throws Exception{
testQuery(String.format("select R_REGIONKEY, R_NAME "
- + "from dfs.`%s/../../sample-data/regionsMF/`", WORKING_PATH));
+ + "from dfs_test.`%s/../../sample-data/regionsMF/`", WORKING_PATH));
}
@Test
public void testWhereOverSFile() throws Exception{
testQuery(String.format("select R_REGIONKEY, R_NAME "
- + "from dfs.`%s/../../sample-data/regionsSF/` "
+ + "from dfs_test.`%s/../../sample-data/regionsSF/` "
+ "WHERE R_REGIONKEY = 1", WORKING_PATH));
}
@Test
public void testWhereOverMFile() throws Exception{
testQuery(String.format("select R_REGIONKEY, R_NAME "
- + "from dfs.`%s/../../sample-data/regionsMF/` "
+ + "from dfs_test.`%s/../../sample-data/regionsMF/` "
+ "WHERE R_REGIONKEY = 1", WORKING_PATH));
}
@@ -84,28 +84,28 @@ public class TestJdbcDistQuery extends JdbcTest{
@Test
public void testAggSingleFile() throws Exception{
testQuery(String.format("select R_REGIONKEY "
- + "from dfs.`%s/../../sample-data/regionsSF/` "
+ + "from dfs_test.`%s/../../sample-data/regionsSF/` "
+ "group by R_REGIONKEY", WORKING_PATH));
}
@Test
public void testAggMultiFile() throws Exception{
testQuery(String.format("select R_REGIONKEY "
- + "from dfs.`%s/../../sample-data/regionsMF/` "
+ + "from dfs_test.`%s/../../sample-data/regionsMF/` "
+ "group by R_REGIONKEY", WORKING_PATH));
}
@Test
public void testAggOrderByDiffGKeyMultiFile() throws Exception{
testQuery(String.format("select R_REGIONKEY, SUM(cast(R_REGIONKEY AS int)) As S "
- + "from dfs.`%s/../../sample-data/regionsMF/` "
+ + "from dfs_test.`%s/../../sample-data/regionsMF/` "
+ "group by R_REGIONKEY ORDER BY S", WORKING_PATH));
}
@Test
public void testAggOrderBySameGKeyMultiFile() throws Exception{
testQuery(String.format("select R_REGIONKEY, SUM(cast(R_REGIONKEY AS int)) As S "
- + "from dfs.`%s/../../sample-data/regionsMF/` "
+ + "from dfs_test.`%s/../../sample-data/regionsMF/` "
+ "group by R_REGIONKEY "
+ "ORDER BY R_REGIONKEY", WORKING_PATH));
}
@@ -113,24 +113,24 @@ public class TestJdbcDistQuery extends JdbcTest{
@Test
public void testJoinSingleFile() throws Exception{
testQuery(String.format("select T1.R_REGIONKEY "
- + "from dfs.`%s/../../sample-data/regionsSF/` as T1 "
- + "join dfs.`%s/../../sample-data/nationsSF/` as T2 "
+ + "from dfs_test.`%s/../../sample-data/regionsSF/` as T1 "
+ + "join dfs_test.`%s/../../sample-data/nationsSF/` as T2 "
+ "on T1.R_REGIONKEY = T2.N_REGIONKEY", WORKING_PATH, WORKING_PATH));
}
@Test
public void testJoinMultiFile() throws Exception{
testQuery(String.format("select T1.R_REGIONKEY "
- + "from dfs.`%s/../../sample-data/regionsMF/` as T1 "
- + "join dfs.`%s/../../sample-data/nationsMF/` as T2 "
+ + "from dfs_test.`%s/../../sample-data/regionsMF/` as T1 "
+ + "join dfs_test.`%s/../../sample-data/nationsMF/` as T2 "
+ "on T1.R_REGIONKEY = T2.N_REGIONKEY", WORKING_PATH, WORKING_PATH));
}
@Test
public void testJoinMFileWhere() throws Exception{
testQuery(String.format("select T1.R_REGIONKEY, T1.R_NAME "
- + "from dfs.`%s/../../sample-data/regionsMF/` as T1 "
- + "join dfs.`%s/../../sample-data/nationsMF/` as T2 "
+ + "from dfs_test.`%s/../../sample-data/regionsMF/` as T1 "
+ + "join dfs_test.`%s/../../sample-data/nationsMF/` as T2 "
+ "on T1.R_REGIONKEY = T2.N_REGIONKEY "
+ "WHERE T1.R_REGIONKEY = 3 ", WORKING_PATH, WORKING_PATH));
}
@@ -139,7 +139,7 @@ public class TestJdbcDistQuery extends JdbcTest{
//NPE at ExternalSortBatch.java : 151
public void testSortSingleFile() throws Exception{
testQuery(String.format("select R_REGIONKEY "
- + "from dfs.`%s/../../sample-data/regionsSF/` "
+ + "from dfs_test.`%s/../../sample-data/regionsSF/` "
+ "order by R_REGIONKEY", WORKING_PATH));
}
@@ -147,14 +147,14 @@ public class TestJdbcDistQuery extends JdbcTest{
//NPE at ExternalSortBatch.java : 151
public void testSortMultiFile() throws Exception{
testQuery(String.format("select R_REGIONKEY "
- + "from dfs.`%s/../../sample-data/regionsMF/` "
+ + "from dfs_test.`%s/../../sample-data/regionsMF/` "
+ "order by R_REGIONKEY", WORKING_PATH));
}
@Test
public void testSortMFileWhere() throws Exception{
testQuery(String.format("select R_REGIONKEY "
- + "from dfs.`%s/../../sample-data/regionsMF/` "
+ + "from dfs_test.`%s/../../sample-data/regionsMF/` "
+ "WHERE R_REGIONKEY = 1 "
+ "order by R_REGIONKEY ", WORKING_PATH ));
}
@@ -162,8 +162,8 @@ public class TestJdbcDistQuery extends JdbcTest{
@Test
public void testJoinAggSortWhere() throws Exception{
testQuery(String.format("select T1.R_REGIONKEY, COUNT(1) as CNT "
- + "from dfs.`%s/../../sample-data/regionsMF/` as T1 "
- + "join dfs.`%s/../../sample-data/nationsMF/` as T2 "
+ + "from dfs_test.`%s/../../sample-data/regionsMF/` as T1 "
+ + "join dfs_test.`%s/../../sample-data/nationsMF/` as T2 "
+ "on T1.R_REGIONKEY = T2.N_REGIONKEY "
+ "WHERE T1.R_REGIONKEY = 3 "
+ "GROUP BY T1.R_REGIONKEY "
@@ -173,7 +173,7 @@ public class TestJdbcDistQuery extends JdbcTest{
@Test
public void testSelectLimit() throws Exception{
testQuery(String.format("select R_REGIONKEY, R_NAME "
- + "from dfs.`%s/../../sample-data/regionsMF/` "
+ + "from dfs_test.`%s/../../sample-data/regionsMF/` "
+ "limit 2", WORKING_PATH));
}
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
index e48897f..b681719 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestJdbcQuery.java
@@ -43,19 +43,19 @@ public class TestJdbcQuery extends JdbcTestQueryBase{
@Test
public void testCast() throws Exception{
- testQuery(String.format("select R_REGIONKEY, cast(R_NAME as varchar(15)) as region, cast(R_COMMENT as varchar(255)) as comment from dfs.`%s/../../sample-data/region.parquet`", WORKING_PATH));
+ testQuery(String.format("select R_REGIONKEY, cast(R_NAME as varchar(15)) as region, cast(R_COMMENT as varchar(255)) as comment from dfs_test.`%s/../../sample-data/region.parquet`", WORKING_PATH));
}
@Test
@Ignore
public void testWorkspace() throws Exception{
- testQuery(String.format("select * from dfs.home.`%s/../../sample-data/region.parquet`", WORKING_PATH));
+ testQuery(String.format("select * from dfs_test.home.`%s/../../sample-data/region.parquet`", WORKING_PATH));
}
@Test
@Ignore
public void testWildcard() throws Exception{
- testQuery(String.format("select * from dfs.`%s/../../sample-data/region.parquet`", WORKING_PATH));
+ testQuery(String.format("select * from dfs_test.`%s/../../sample-data/region.parquet`", WORKING_PATH));
}
@Test
@@ -71,19 +71,19 @@ public class TestJdbcQuery extends JdbcTestQueryBase{
@Test
@Ignore
public void testLogicalExplain() throws Exception{
- testQuery(String.format("EXPLAIN PLAN WITHOUT IMPLEMENTATION FOR select * from dfs.`%s/../../sample-data/region.parquet`", WORKING_PATH));
+ testQuery(String.format("EXPLAIN PLAN WITHOUT IMPLEMENTATION FOR select * from dfs_test.`%s/../../sample-data/region.parquet`", WORKING_PATH));
}
@Test
@Ignore
public void testPhysicalExplain() throws Exception{
- testQuery(String.format("EXPLAIN PLAN FOR select * from dfs.`%s/../../sample-data/region.parquet`", WORKING_PATH));
+ testQuery(String.format("EXPLAIN PLAN FOR select * from dfs_test.`%s/../../sample-data/region.parquet`", WORKING_PATH));
}
@Test
@Ignore
public void checkUnknownColumn() throws Exception{
- testQuery(String.format("SELECT unknownColumn FROM dfs.`%s/../../sample-data/region.parquet`", WORKING_PATH));
+ testQuery(String.format("SELECT unknownColumn FROM dfs_test.`%s/../../sample-data/region.parquet`", WORKING_PATH));
}
@Test
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestMetadataDDL.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestMetadataDDL.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestMetadataDDL.java
index e6e540a..6d09976 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestMetadataDDL.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestMetadataDDL.java
@@ -55,15 +55,15 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
@Test
public void testShowTables() throws Exception{
- JdbcAssert.withFull("hive.default")
+ JdbcAssert.withFull("hive_test.default")
.sql("SHOW TABLES")
.returns(
- "TABLE_SCHEMA=hive.default; TABLE_NAME=readtest\n" +
- "TABLE_SCHEMA=hive.default; TABLE_NAME=empty_table\n" +
- "TABLE_SCHEMA=hive.default; TABLE_NAME=infoschematest\n" +
- "TABLE_SCHEMA=hive.default; TABLE_NAME=hiveview\n" +
- "TABLE_SCHEMA=hive.default; TABLE_NAME=kv\n" +
- "TABLE_SCHEMA=hive.default; TABLE_NAME=foodate\n"
+ "TABLE_SCHEMA=hive_test.default; TABLE_NAME=readtest\n" +
+ "TABLE_SCHEMA=hive_test.default; TABLE_NAME=empty_table\n" +
+ "TABLE_SCHEMA=hive_test.default; TABLE_NAME=infoschematest\n" +
+ "TABLE_SCHEMA=hive_test.default; TABLE_NAME=hiveview\n" +
+ "TABLE_SCHEMA=hive_test.default; TABLE_NAME=kv\n" +
+ "TABLE_SCHEMA=hive_test.default; TABLE_NAME=foodate\n"
);
}
@@ -79,15 +79,15 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
"TABLE_SCHEMA=INFORMATION_SCHEMA; TABLE_NAME=SCHEMATA\n"
);
- JdbcAssert.withFull("dfs.tmp")
- .sql("SHOW TABLES IN hive.`default`")
+ JdbcAssert.withFull("dfs_test.tmp")
+ .sql("SHOW TABLES IN hive_test.`default`")
.returns(
- "TABLE_SCHEMA=hive.default; TABLE_NAME=readtest\n" +
- "TABLE_SCHEMA=hive.default; TABLE_NAME=empty_table\n" +
- "TABLE_SCHEMA=hive.default; TABLE_NAME=infoschematest\n" +
- "TABLE_SCHEMA=hive.default; TABLE_NAME=hiveview\n" +
- "TABLE_SCHEMA=hive.default; TABLE_NAME=kv\n" +
- "TABLE_SCHEMA=hive.default; TABLE_NAME=foodate\n");
+ "TABLE_SCHEMA=hive_test.default; TABLE_NAME=readtest\n" +
+ "TABLE_SCHEMA=hive_test.default; TABLE_NAME=empty_table\n" +
+ "TABLE_SCHEMA=hive_test.default; TABLE_NAME=infoschematest\n" +
+ "TABLE_SCHEMA=hive_test.default; TABLE_NAME=hiveview\n" +
+ "TABLE_SCHEMA=hive_test.default; TABLE_NAME=kv\n" +
+ "TABLE_SCHEMA=hive_test.default; TABLE_NAME=foodate\n");
}
@Test
@@ -107,13 +107,16 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
@Test
public void testShowDatabases() throws Exception{
String expected =
- "SCHEMA_NAME=hive.default\n" +
- "SCHEMA_NAME=hive.db1\n" +
- "SCHEMA_NAME=dfs.home\n" +
"SCHEMA_NAME=dfs.default\n" +
+ "SCHEMA_NAME=dfs.root\n" +
"SCHEMA_NAME=dfs.tmp\n" +
- "SCHEMA_NAME=cp.default\n" +
"SCHEMA_NAME=sys\n" +
+ "SCHEMA_NAME=dfs_test.home\n" +
+ "SCHEMA_NAME=dfs_test.default\n" +
+ "SCHEMA_NAME=dfs_test.tmp\n" +
+ "SCHEMA_NAME=cp.default\n" +
+ "SCHEMA_NAME=hive_test.default\n" +
+ "SCHEMA_NAME=hive_test.db1\n" +
"SCHEMA_NAME=INFORMATION_SCHEMA\n";
JdbcAssert.withNoDefaultSchema().sql("SHOW DATABASES").returns(expected);
@@ -123,8 +126,8 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
@Test
public void testShowDatabasesWhere() throws Exception{
JdbcAssert.withNoDefaultSchema()
- .sql("SHOW DATABASES WHERE SCHEMA_NAME='dfs.tmp'")
- .returns("SCHEMA_NAME=dfs.tmp\n");
+ .sql("SHOW DATABASES WHERE SCHEMA_NAME='dfs_test.tmp'")
+ .returns("SCHEMA_NAME=dfs_test.tmp\n");
}
@Test
@@ -132,8 +135,8 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
JdbcAssert.withNoDefaultSchema()
.sql("SHOW DATABASES LIKE '%i%'")
.returns(
- "SCHEMA_NAME=hive.default\n"+
- "SCHEMA_NAME=hive.db1");
+ "SCHEMA_NAME=hive_test.default\n"+
+ "SCHEMA_NAME=hive_test.db1");
}
@Test
@@ -150,7 +153,7 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
@Test
public void testDescribeTableNullableColumns() throws Exception{
JdbcAssert.withNoDefaultSchema()
- .sql("DESCRIBE hive.`default`.kv")
+ .sql("DESCRIBE hive_test.`default`.kv")
.returns(
"COLUMN_NAME=key; DATA_TYPE=INTEGER; IS_NULLABLE=YES\n" +
"COLUMN_NAME=value; DATA_TYPE=VARCHAR; IS_NULLABLE=YES\n"
@@ -174,10 +177,10 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
public Void apply(Connection connection) {
try {
Statement statement = connection.createStatement();
- statement.executeQuery("USE dfs.tmp").close();
+ statement.executeQuery("USE dfs_test.tmp").close();
- // INFORMATION_SCHEMA already has a table named "TABLES". Now create a table with same name in "dfs.tmp" schema
- statement.executeQuery("CREATE OR REPLACE VIEW `TABLES` AS SELECT key FROM hive.kv").close();
+ // INFORMATION_SCHEMA already has a table named "TABLES". Now create a table with same name in "dfs_test.tmp" schema
+ statement.executeQuery("CREATE OR REPLACE VIEW `TABLES` AS SELECT key FROM hive_test.kv").close();
// Test describe of `TABLES` with no schema qualifier
ResultSet resultSet = statement.executeQuery("DESCRIBE `TABLES`");
@@ -247,7 +250,7 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
JdbcAssert.withNoDefaultSchema()
.sql("SELECT COLUMN_NAME, DATA_TYPE, CHARACTER_MAXIMUM_LENGTH, NUMERIC_PRECISION " +
"FROM INFORMATION_SCHEMA.`COLUMNS` " +
- "WHERE TABLE_SCHEMA = 'hive.default' AND TABLE_NAME = 'infoschematest' AND " +
+ "WHERE TABLE_SCHEMA = 'hive_test.default' AND TABLE_NAME = 'infoschematest' AND " +
"(COLUMN_NAME = 'stringtype' OR COLUMN_NAME = 'varchartype' OR " +
"COLUMN_NAME = 'inttype' OR COLUMN_NAME = 'decimaltype')")
.returns(
@@ -259,7 +262,7 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
@Test
public void testDefaultSchemaDfs() throws Exception{
- JdbcAssert.withFull("dfs")
+ JdbcAssert.withFull("dfs_test")
.sql(String.format("SELECT R_REGIONKEY FROM `%s/../../sample-data/region.parquet` LIMIT 2", WORKING_PATH))
.returns(
"R_REGIONKEY=0\n" +
@@ -277,7 +280,7 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
@Test
public void testDefaultSchemaHive() throws Exception{
- JdbcAssert.withFull("hive")
+ JdbcAssert.withFull("hive_test")
.sql("SELECT * FROM kv LIMIT 2")
.returns(
"key=1; value= key_1\n" +
@@ -286,7 +289,7 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
@Test
public void testDefaultTwoLevelSchemaHive() throws Exception{
- JdbcAssert.withFull("hive.db1")
+ JdbcAssert.withFull("hive_test.db1")
.sql("SELECT * FROM `kv_db1` LIMIT 2")
.returns(
"key=1; value= key_1\n" +
@@ -295,7 +298,7 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
@Test
public void testQueryFromNonDefaultSchema() throws Exception{
- JdbcAssert.withFull("hive")
+ JdbcAssert.withFull("hive_test")
.sql("SELECT full_name FROM cp.`employee.json` LIMIT 2")
.returns(
"full_name=Sheri Nowmer\n" +
@@ -305,8 +308,8 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
@Test
public void testUseSchema() throws Exception{
JdbcAssert.withNoDefaultSchema()
- .sql("USE hive.`default`")
- .returns("ok=true; summary=Default schema changed to 'hive.default'");
+ .sql("USE hive_test.`default`")
+ .returns("ok=true; summary=Default schema changed to 'hive_test.default'");
}
@Test
@@ -322,10 +325,10 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
public Void apply(Connection connection) {
try {
Statement statement = connection.createStatement();
- ResultSet resultSet = statement.executeQuery("USE hive.db1");
+ ResultSet resultSet = statement.executeQuery("USE hive_test.db1");
String result = JdbcAssert.toString(resultSet).trim();
resultSet.close();
- String expected = "ok=true; summary=Default schema changed to 'hive.db1'";
+ String expected = "ok=true; summary=Default schema changed to 'hive_test.db1'";
assertTrue(String.format("Generated string:\n%s\ndoes not match:\n%s", result, expected), expected.equals(result));
@@ -344,7 +347,7 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
}
// Tests using backticks around the complete schema path
- // select * from `dfs.tmp`.`/tmp/nation.parquet`;
+ // select * from `dfs_test.tmp`.`/tmp/nation.parquet`;
@Test
public void testCompleteSchemaRef1() throws Exception {
testQuery("select * from `cp.default`.`employee.json` limit 2");
@@ -358,10 +361,10 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
Statement statement = connection.createStatement();
// change default schema
- ResultSet resultSet = statement.executeQuery("USE `dfs.default`");
+ ResultSet resultSet = statement.executeQuery("USE `dfs_test.default`");
String result = JdbcAssert.toString(resultSet).trim();
resultSet.close();
- String expected = "ok=true; summary=Default schema changed to 'dfs.default'";
+ String expected = "ok=true; summary=Default schema changed to 'dfs_test.default'";
assertTrue(String.format("Generated string:\n%s\ndoes not match:\n%s", result, expected), expected.equals(result));
resultSet = statement.executeQuery(
@@ -382,8 +385,8 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
@Test
public void testShowFiles() throws Exception {
- testQuery("show files from dfs.`/tmp`");
- testQuery("show files from `dfs.default`.`/tmp`");
+ testQuery("show files from dfs_test.`/tmp`");
+ testQuery("show files from `dfs_test.default`.`/tmp`");
}
@@ -395,7 +398,7 @@ public class TestMetadataDDL extends JdbcTestQueryBase {
Statement statement = connection.createStatement();
// change default schema
- statement.executeQuery("USE dfs.`default`");
+ statement.executeQuery("USE dfs_test.`default`");
// show files
ResultSet resultSet = statement.executeQuery("show files from `/tmp`");
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestViews.java
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestViews.java b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestViews.java
index e3f6a8e..d21b56e 100644
--- a/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestViews.java
+++ b/exec/jdbc/src/test/java/org/apache/drill/jdbc/test/TestViews.java
@@ -57,13 +57,13 @@ public class TestViews extends JdbcTestQueryBase {
Statement statement = connection.createStatement();
// change default schema
- statement.executeQuery("USE dfs.tmp");
+ statement.executeQuery("USE dfs_test.tmp");
// create view
ResultSet resultSet = statement.executeQuery(viewCreate);
String result = JdbcAssert.toString(resultSet).trim();
resultSet.close();
- String viewCreateResult = "ok=true; summary=View '" + viewName + "' created successfully in 'dfs.tmp' schema";
+ String viewCreateResult = "ok=true; summary=View '" + viewName + "' created successfully in 'dfs_test.tmp' schema";
assertTrue(String.format("Generated string:\n%s\ndoes not match:\n%s", result, viewCreateResult),
viewCreateResult.equals(result));
@@ -176,7 +176,7 @@ public class TestViews extends JdbcTestQueryBase {
@Test
public void testViewOnHiveTable1() throws Exception{
testViewHelper(
- "CREATE VIEW hiveview AS SELECT * FROM hive.kv",
+ "CREATE VIEW hiveview AS SELECT * FROM hive_test.kv",
"hiveview",
"SELECT * FROM hiveview LIMIT 1",
"key=1; value= key_1");
@@ -185,7 +185,7 @@ public class TestViews extends JdbcTestQueryBase {
@Test
public void testViewOnHiveTable2() throws Exception{
testViewHelper(
- "CREATE VIEW hiveview AS SELECT * FROM hive.kv",
+ "CREATE VIEW hiveview AS SELECT * FROM hive_test.kv",
"hiveview",
"SELECT key, `value` FROM hiveview LIMIT 1",
"key=1; value= key_1");
@@ -194,7 +194,7 @@ public class TestViews extends JdbcTestQueryBase {
@Test
public void testViewOnHiveTable3() throws Exception{
testViewHelper(
- "CREATE VIEW hiveview AS SELECT * FROM hive.kv",
+ "CREATE VIEW hiveview AS SELECT * FROM hive_test.kv",
"hiveview",
"SELECT `value` FROM hiveview LIMIT 1",
"value= key_1");
@@ -203,7 +203,7 @@ public class TestViews extends JdbcTestQueryBase {
@Test
public void testViewOnHiveTable4() throws Exception{
testViewHelper(
- "CREATE VIEW hiveview AS SELECT key, `value` FROM hive.kv",
+ "CREATE VIEW hiveview AS SELECT key, `value` FROM hive_test.kv",
"hiveview",
"SELECT * FROM hiveview LIMIT 1",
"key=1; value= key_1");
@@ -212,7 +212,7 @@ public class TestViews extends JdbcTestQueryBase {
@Test
public void testViewOnHiveTable5() throws Exception{
testViewHelper(
- "CREATE VIEW hiveview AS SELECT key, `value` FROM hive.kv",
+ "CREATE VIEW hiveview AS SELECT key, `value` FROM hive_test.kv",
"hiveview",
"SELECT key, `value` FROM hiveview LIMIT 1",
"key=1; value= key_1");
@@ -225,7 +225,7 @@ public class TestViews extends JdbcTestQueryBase {
"cast(columns[1] AS CHAR(25)) n_name, " +
"cast(columns[2] AS INT) n_regionkey, " +
"cast(columns[3] AS VARCHAR(152)) n_comment " +
- "FROM dfs.`%s/src/test/resources/nation`", WORKING_PATH);
+ "FROM dfs_test.`%s/src/test/resources/nation`", WORKING_PATH);
testViewHelper(
query,
@@ -242,7 +242,7 @@ public class TestViews extends JdbcTestQueryBase {
Statement statement = connection.createStatement();
// change default schema
- statement.executeQuery("USE dfs.tmp");
+ statement.executeQuery("USE dfs_test.tmp");
// create view
statement.executeQuery(
@@ -259,7 +259,7 @@ public class TestViews extends JdbcTestQueryBase {
resultSet = statement.executeQuery("DROP VIEW testview3");
result = JdbcAssert.toString(resultSet).trim();
resultSet.close();
- expected = "ok=true; summary=View 'testview3' deleted successfully from 'dfs.tmp' schema";
+ expected = "ok=true; summary=View 'testview3' deleted successfully from 'dfs_test.tmp' schema";
assertTrue(String.format("Generated string:\n%s\ndoes not match:\n%s", result, expected),
expected.equals(result));
@@ -280,17 +280,17 @@ public class TestViews extends JdbcTestQueryBase {
Statement statement = connection.createStatement();
// change default schema
- statement.executeQuery("USE dfs.tmp");
+ statement.executeQuery("USE dfs_test.tmp");
// create view
statement.executeQuery(
- "CREATE VIEW testview3 AS SELECT * FROM hive.kv");
+ "CREATE VIEW testview3 AS SELECT * FROM hive_test.kv");
// show tables on view
ResultSet resultSet = statement.executeQuery("SHOW TABLES like 'testview3'");
String result = JdbcAssert.toString(resultSet).trim();
resultSet.close();
- String expected = "TABLE_SCHEMA=dfs.tmp; TABLE_NAME=testview3";
+ String expected = "TABLE_SCHEMA=dfs_test.tmp; TABLE_NAME=testview3";
assertTrue(String.format("Generated string:\n%s\ndoes not match:\n%s", result, expected),
expected.equals(result));
@@ -299,7 +299,7 @@ public class TestViews extends JdbcTestQueryBase {
"WHERE TABLE_NAME = 'testview3'");
result = JdbcAssert.toString(resultSet).trim();
resultSet.close();
- expected = "TABLE_CATALOG=DRILL; TABLE_SCHEMA=dfs.tmp; TABLE_NAME=testview3; VIEW_DEFINITION=SELECT *\nFROM `hive`.`kv`";
+ expected = "TABLE_CATALOG=DRILL; TABLE_SCHEMA=dfs_test.tmp; TABLE_NAME=testview3; VIEW_DEFINITION=SELECT *\nFROM `hive_test`.`kv`";
DrillAssert.assertMultiLineStringEquals(String.format("Generated string:\n%s\ndoes not match:\n%s", result, expected),
expected, result);
@@ -308,12 +308,12 @@ public class TestViews extends JdbcTestQueryBase {
"WHERE TABLE_NAME = 'testview3'");
result = JdbcAssert.toString(resultSet).trim();
resultSet.close();
- expected = "TABLE_CATALOG=DRILL; TABLE_SCHEMA=dfs.tmp; TABLE_NAME=testview3; TABLE_TYPE=VIEW";
+ expected = "TABLE_CATALOG=DRILL; TABLE_SCHEMA=dfs_test.tmp; TABLE_NAME=testview3; TABLE_TYPE=VIEW";
assertTrue(String.format("Generated string:\n%s\ndoes not match:\n%s", result, expected),
expected.equals(result));
// describe a view
- resultSet = statement.executeQuery("DESCRIBE dfs.tmp.testview3");
+ resultSet = statement.executeQuery("DESCRIBE dfs_test.tmp.testview3");
result = JdbcAssert.toString(resultSet).trim();
resultSet.close();
expected =
@@ -335,9 +335,9 @@ public class TestViews extends JdbcTestQueryBase {
@Test
public void testInfoSchemaWithHiveView() throws Exception {
- JdbcAssert.withFull("hive.default")
+ JdbcAssert.withFull("hive_test.default")
.sql("SELECT * FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_NAME = 'hiveview'")
- .returns("TABLE_CATALOG=DRILL; TABLE_SCHEMA=hive.default; TABLE_NAME=hiveview; " +
+ .returns("TABLE_CATALOG=DRILL; TABLE_SCHEMA=hive_test.default; TABLE_NAME=hiveview; " +
"VIEW_DEFINITION=SELECT `kv`.`key`, `kv`.`value` FROM `default`.`kv`");
}
@@ -352,24 +352,24 @@ public class TestViews extends JdbcTestQueryBase {
statement.executeQuery("USE cp");
// create a view with full schema identifier
- ResultSet resultSet = statement.executeQuery("CREATE VIEW dfs.tmp.testview AS SELECT * FROM hive.kv");
+ ResultSet resultSet = statement.executeQuery("CREATE VIEW dfs_test.tmp.testview AS SELECT * FROM hive_test.kv");
String result = JdbcAssert.toString(resultSet).trim();
resultSet.close();
- String expected = "ok=true; summary=View 'testview' created successfully in 'dfs.tmp' schema";
+ String expected = "ok=true; summary=View 'testview' created successfully in 'dfs_test.tmp' schema";
assertTrue(String.format("Generated string:\n%s\ndoes not match:\n%s", result, expected),
expected.equals(result));
// query from view
- resultSet = statement.executeQuery("SELECT key FROM dfs.tmp.testview LIMIT 1");
+ resultSet = statement.executeQuery("SELECT key FROM dfs_test.tmp.testview LIMIT 1");
result = JdbcAssert.toString(resultSet).trim();
resultSet.close();
expected = "key=1";
assertTrue(String.format("Generated string:\n%s\ndoes not match:\n%s", result, expected),
expected.equals(result));
- statement.executeQuery("drop view dfs.tmp.testview").close();
+ statement.executeQuery("drop view dfs_test.tmp.testview").close();
- statement.executeQuery("drop view dfs.tmp.testview").close();
+ statement.executeQuery("drop view dfs_test.tmp.testview").close();
statement.close();
return null;
@@ -388,13 +388,13 @@ public class TestViews extends JdbcTestQueryBase {
Statement statement = connection.createStatement();
// change default schema
- statement.executeQuery("USE dfs");
+ statement.executeQuery("USE dfs_test");
// create a view with partial schema identifier
- ResultSet resultSet = statement.executeQuery("CREATE VIEW tmp.testview AS SELECT * FROM hive.kv");
+ ResultSet resultSet = statement.executeQuery("CREATE VIEW tmp.testview AS SELECT * FROM hive_test.kv");
String result = JdbcAssert.toString(resultSet).trim();
resultSet.close();
- String expected = "ok=true; summary=View 'testview' created successfully in 'dfs.tmp' schema";
+ String expected = "ok=true; summary=View 'testview' created successfully in 'dfs_test.tmp' schema";
assertTrue(String.format("Generated string:\n%s\ndoes not match:\n%s", result, expected),
expected.equals(result));
@@ -407,7 +407,7 @@ public class TestViews extends JdbcTestQueryBase {
expected.equals(result));
// change the default schema and query
- statement.executeQuery("USE dfs.tmp");
+ statement.executeQuery("USE dfs_test.tmp");
resultSet = statement.executeQuery("SELECT key FROM testview LIMIT 1");
result = JdbcAssert.toString(resultSet).trim();
resultSet.close();
http://git-wip-us.apache.org/repos/asf/incubator-drill/blob/699851b8/exec/jdbc/src/test/resources/bootstrap-storage-plugins.json
----------------------------------------------------------------------
diff --git a/exec/jdbc/src/test/resources/bootstrap-storage-plugins.json b/exec/jdbc/src/test/resources/bootstrap-storage-plugins.json
index 53600f3..548ec3d 100644
--- a/exec/jdbc/src/test/resources/bootstrap-storage-plugins.json
+++ b/exec/jdbc/src/test/resources/bootstrap-storage-plugins.json
@@ -1,57 +1,15 @@
{
"storage":{
- dfs: {
- type: "file",
- connection: "file:///",
- workspaces: {
- "home" : {
- location: "/",
- writable: false
- },
- "tmp" : {
- location: "/tmp/drilltest",
- writable: true,
- storageformat: "csv"
- }
- },
- formats: {
- "psv" : {
- type: "text",
- extensions: [ "tbl" ],
- delimiter: "|"
- },
- "csv" : {
- type: "text",
- extensions: [ "csv" ],
- delimiter: ","
- },
- "tsv" : {
- type: "text",
- extensions: [ "tsv" ],
- delimiter: "\t"
- },
- "parquet" : {
- type: "parquet"
- },
- "json" : {
- type: "json"
- }
- }
- },
- cp: {
- type: "file",
- connection: "classpath:///"
- },
- hive : {
- type:"hive",
- config :
- {
- "hive.metastore.uris" : "",
- "javax.jdo.option.ConnectionURL" : "jdbc:derby:;databaseName=/tmp/drill_hive_db;create=true",
- "hive.metastore.warehouse.dir" : "/tmp/drill_hive_wh",
- "fs.default.name" : "file:///",
- "hive.metastore.sasl.enabled" : "false"
- }
+ hive_test : {
+ type:"hive",
+ config :
+ {
+ "hive.metastore.uris" : "",
+ "javax.jdo.option.ConnectionURL" : "jdbc:derby:;databaseName=/tmp/drill_hive_db;create=true",
+ "hive.metastore.warehouse.dir" : "/tmp/drill_hive_wh",
+ "fs.default.name" : "file:///",
+ "hive.metastore.sasl.enabled" : "false"
}
+ }
}
}