drill-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From prog...@apache.org
Subject [09/22] drill git commit: DRILL-5783, DRILL-5841, DRILL-5894: Rationalize test temp directories
Date Wed, 15 Nov 2017 01:46:55 GMT
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchema.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchema.java
index 00a7087..a8eef3c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchema.java
@@ -27,18 +27,20 @@ import static org.junit.Assert.assertTrue;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlTest;
-import org.apache.drill.TestBuilder;
+import org.apache.drill.test.TestBuilder;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.record.RecordBatchLoader;
 import org.apache.drill.exec.record.VectorWrapper;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.exec.store.dfs.FileSystemConfig;
 import org.apache.drill.exec.vector.NullableVarCharVector;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.nio.file.Paths;
 import java.util.List;
 import java.util.Map;
 
@@ -51,9 +53,15 @@ import java.util.Map;
  */
 @Category(SqlTest.class)
 public class TestInfoSchema extends BaseTestQuery {
-
+  public static final String TEST_SUB_DIR = "testSubDir";
   private static final ObjectMapper mapper = new ObjectMapper().enable(INDENT_OUTPUT);
 
+  @BeforeClass
+  public static void setupFiles() {
+    dirTestWatcher.copyFileToRoot(Paths.get("sample-data"));
+    dirTestWatcher.makeRootSubDir(Paths.get(TEST_SUB_DIR));
+  }
+
   @Test
   public void selectFromAllTables() throws Exception{
     test("select * from INFORMATION_SCHEMA.SCHEMATA");
@@ -133,9 +141,6 @@ public class TestInfoSchema extends BaseTestQuery {
             new String[] { "dfs.tmp" },
             new String[] { "cp.default" },
             new String[] { "sys" },
-            new String[] { "dfs_test.home" },
-            new String[] { "dfs_test.default" },
-            new String[] { "dfs_test.tmp" },
             new String[] { "INFORMATION_SCHEMA" }
         );
 
@@ -161,10 +166,10 @@ public class TestInfoSchema extends BaseTestQuery {
   @Test
   public void showDatabasesWhere() throws Exception{
     testBuilder()
-        .sqlQuery("SHOW DATABASES WHERE SCHEMA_NAME='dfs_test.tmp'")
+        .sqlQuery("SHOW DATABASES WHERE SCHEMA_NAME='dfs.tmp'")
         .unOrdered()
         .baselineColumns("SCHEMA_NAME")
-        .baselineValues("dfs_test.tmp")
+        .baselineValues("dfs.tmp")
         .go();
   }
 
@@ -207,13 +212,13 @@ public class TestInfoSchema extends BaseTestQuery {
   @Test
   public void describeWhenSameTableNameExistsInMultipleSchemas() throws Exception{
     try {
-      test("USE dfs_test.tmp");
+      test("USE dfs.tmp");
       test("CREATE OR REPLACE VIEW `TABLES` AS SELECT full_name FROM cp.`employee.json`");
 
       testBuilder()
           .sqlQuery("DESCRIBE `TABLES`")
           .unOrdered()
-          .optionSettingQueriesForTestQuery("USE dfs_test.tmp")
+          .optionSettingQueriesForTestQuery("USE dfs.tmp")
           .baselineColumns("COLUMN_NAME", "DATA_TYPE", "IS_NULLABLE")
           .baselineValues("full_name", "ANY", "YES")
           .go();
@@ -228,7 +233,7 @@ public class TestInfoSchema extends BaseTestQuery {
           .baselineValues("TABLE_TYPE", "CHARACTER VARYING", "NO")
           .go();
     } finally {
-      test("DROP VIEW dfs_test.tmp.`TABLES`");
+      test("DROP VIEW dfs.tmp.`TABLES`");
     }
   }
 
@@ -280,9 +285,9 @@ public class TestInfoSchema extends BaseTestQuery {
   @Test
   public void defaultSchemaDfs() throws Exception{
     testBuilder()
-        .sqlQuery("SELECT R_REGIONKEY FROM `[WORKING_PATH]/../../sample-data/region.parquet` LIMIT 1")
+        .sqlQuery("SELECT R_REGIONKEY FROM `sample-data/region.parquet` LIMIT 1")
         .unOrdered()
-        .optionSettingQueriesForTestQuery("USE dfs_test")
+        .optionSettingQueriesForTestQuery("USE dfs")
         .baselineColumns("R_REGIONKEY")
         .baselineValues(0L)
         .go();
@@ -305,7 +310,7 @@ public class TestInfoSchema extends BaseTestQuery {
     testBuilder()
         .sqlQuery("SELECT full_name FROM cp.`employee.json` LIMIT 1")
         .unOrdered()
-        .optionSettingQueriesForTestQuery("USE dfs_test")
+        .optionSettingQueriesForTestQuery("USE dfs")
         .baselineColumns("full_name")
         .baselineValues("Sheri Nowmer")
         .go();
@@ -314,27 +319,27 @@ public class TestInfoSchema extends BaseTestQuery {
   @Test
   public void useSchema() throws Exception{
     testBuilder()
-        .sqlQuery("USE dfs_test.`default`")
+        .sqlQuery("USE dfs.`default`")
         .unOrdered()
         .baselineColumns("ok", "summary")
-        .baselineValues(true, "Default schema changed to [dfs_test.default]")
+        .baselineValues(true, "Default schema changed to [dfs.default]")
         .go();
   }
 
   @Test
   public void useSubSchemaWithinSchema() throws Exception{
     testBuilder()
-        .sqlQuery("USE dfs_test")
+        .sqlQuery("USE dfs")
         .unOrdered()
         .baselineColumns("ok", "summary")
-        .baselineValues(true, "Default schema changed to [dfs_test]")
+        .baselineValues(true, "Default schema changed to [dfs]")
         .go();
 
     testBuilder()
         .sqlQuery("USE tmp")
         .unOrdered()
         .baselineColumns("ok", "summary")
-        .baselineValues(true, "Default schema changed to [dfs_test.tmp]")
+        .baselineValues(true, "Default schema changed to [dfs.tmp]")
         .go();
 
     testBuilder()
@@ -352,7 +357,7 @@ public class TestInfoSchema extends BaseTestQuery {
   }
 
   // Tests using backticks around the complete schema path
-  // select * from `dfs_test.tmp`.`/tmp/nation.parquet`;
+  // select * from `dfs.tmp`.`/tmp/nation.parquet`;
   @Test
   public void completeSchemaRef1() throws Exception {
     test("SELECT * FROM `cp.default`.`employee.json` limit 2");
@@ -360,26 +365,26 @@ public class TestInfoSchema extends BaseTestQuery {
 
   @Test
   public void showFiles() throws Exception {
-    test("show files from dfs_test.`/tmp`");
-    test("show files from `dfs_test.default`.`/tmp`");
+    test("show files from dfs.`%s`", TEST_SUB_DIR);
+    test("show files from `dfs.default`.`%s`", TEST_SUB_DIR);
   }
 
   @Test
   public void showFilesWithDefaultSchema() throws Exception{
-    test("USE dfs_test.`default`");
-    test("SHOW FILES FROM `/tmp`");
+    test("USE dfs.`default`");
+    test("SHOW FILES FROM `%s`", TEST_SUB_DIR);
   }
 
   @Test
   public void describeSchemaSyntax() throws Exception {
-    test("describe schema dfs_test");
-    test("describe schema dfs_test.`default`");
-    test("describe database dfs_test.`default`");
+    test("describe schema dfs");
+    test("describe schema dfs.`default`");
+    test("describe database dfs.`default`");
   }
 
   @Test
   public void describeSchemaOutput() throws Exception {
-    final List<QueryDataBatch> result = testSqlWithResults("describe schema dfs_test.tmp");
+    final List<QueryDataBatch> result = testSqlWithResults("describe schema dfs.tmp");
     assertTrue(result.size() == 1);
     final QueryDataBatch batch = result.get(0);
     final RecordBatchLoader loader = new RecordBatchLoader(getDrillbitContext().getAllocator());
@@ -390,7 +395,7 @@ public class TestInfoSchema extends BaseTestQuery {
         NullableVarCharVector.class,
         loader.getValueVectorId(SchemaPath.getCompoundPath("schema")).getFieldIds());
     String schema = schemaValueVector.getValueVector().getAccessor().getObject(0).toString();
-    assertEquals("dfs_test.tmp", schema);
+    assertEquals("dfs.tmp", schema);
 
     // check properties column value
     final VectorWrapper propertiesValueVector = loader.getValueAccessorById(
@@ -408,7 +413,7 @@ public class TestInfoSchema extends BaseTestQuery {
     // check some stable properties values
     assertEquals("file", configMap.get("type"));
 
-    final FileSystemConfig testConfig = (FileSystemConfig) bits[0].getContext().getStorage().getPlugin("dfs_test").getConfig();
+    final FileSystemConfig testConfig = (FileSystemConfig) bits[0].getContext().getStorage().getPlugin("dfs").getConfig();
     final String tmpSchemaLocation = testConfig.workspaces.get("tmp").getLocation();
     assertEquals(tmpSchemaLocation, configMap.get("location"));
 
@@ -420,5 +425,4 @@ public class TestInfoSchema extends BaseTestQuery {
   public void describeSchemaInvalid() throws Exception {
     errorMsgTestHelper("describe schema invalid.schema", "Invalid schema name [invalid.schema]");
   }
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestSimpleCastFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestSimpleCastFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestSimpleCastFunctions.java
index ea64c7d..00c7af4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestSimpleCastFunctions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestSimpleCastFunctions.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.sql;
 
 import com.google.common.base.Function;
 import com.google.common.collect.Lists;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlTest;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestViewSupport.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestViewSupport.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestViewSupport.java
index c2bd215..d571c66 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestViewSupport.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestViewSupport.java
@@ -21,23 +21,34 @@ import com.google.common.collect.ImmutableList;
 import org.apache.commons.io.FileUtils;
 import org.apache.drill.categories.SqlTest;
 import org.apache.drill.categories.UnlikelyTest;
+import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import java.io.File;
+import java.nio.file.Paths;
 import java.util.List;
 
+import static org.apache.drill.exec.util.StoragePluginTestUtils.DFS_TMP_SCHEMA;
+import static org.apache.drill.exec.util.StoragePluginTestUtils.TMP_SCHEMA;
+
 @Category(SqlTest.class)
 public class TestViewSupport extends TestBaseViewSupport {
+
+  @BeforeClass
+  public static void setupTestFiles() {
+    dirTestWatcher.copyResourceToRoot(Paths.get("nation"));
+  }
+
   @Test
   public void referToSchemaInsideAndOutsideView() throws Exception {
-    String use = "use dfs_test.tmp;";
+    String use = "use dfs.tmp;";
     String selectInto = "create table monkey as select c_custkey, c_nationkey from cp.`tpch/customer.parquet`";
     String createView = "create or replace view myMonkeyView as select c_custkey, c_nationkey from monkey";
     String selectInside = "select * from myMonkeyView;";
     String use2 = "use cp;";
-    String selectOutside = "select * from dfs_test.tmp.myMonkeyView;";
+    String selectOutside = "select * from dfs.tmp.myMonkeyView;";
 
     test(use);
     test(selectInto);
@@ -56,7 +67,7 @@ public class TestViewSupport extends TestBaseViewSupport {
     final String viewName = "testNullabilityPropertyInViewPersistence";
     try {
 
-      test("USE dfs_test.tmp");
+      test("USE dfs.tmp");
       test(String.format("CREATE OR REPLACE VIEW %s AS SELECT " +
           "CAST(customer_id AS BIGINT) as cust_id, " +
           "CAST(fname AS VARCHAR(25)) as fname, " +
@@ -88,7 +99,7 @@ public class TestViewSupport extends TestBaseViewSupport {
   @Test
   public void viewWithStarInDef_StarInQuery() throws Exception {
     testViewHelper(
-        TEMP_SCHEMA,
+        DFS_TMP_SCHEMA,
         null,
         "SELECT * FROM cp.`region.json` ORDER BY `region_id`",
         "SELECT * FROM TEST_SCHEMA.TEST_VIEW_NAME LIMIT 1",
@@ -101,7 +112,7 @@ public class TestViewSupport extends TestBaseViewSupport {
   @Test
   public void viewWithSelectFieldsInDef_StarInQuery() throws Exception {
     testViewHelper(
-        TEMP_SCHEMA,
+        DFS_TMP_SCHEMA,
         null,
         "SELECT region_id, sales_city FROM cp.`region.json` ORDER BY `region_id`",
         "SELECT * FROM TEST_SCHEMA.TEST_VIEW_NAME LIMIT 2",
@@ -116,7 +127,7 @@ public class TestViewSupport extends TestBaseViewSupport {
   @Test
   public void viewWithSelectFieldsInDef_SelectFieldsInView_StarInQuery() throws Exception {
     testViewHelper(
-        TEMP_SCHEMA,
+        DFS_TMP_SCHEMA,
         "(regionid, salescity)",
         "SELECT region_id, sales_city FROM cp.`region.json` ORDER BY `region_id`",
         "SELECT * FROM TEST_SCHEMA.TEST_VIEW_NAME LIMIT 2",
@@ -131,7 +142,7 @@ public class TestViewSupport extends TestBaseViewSupport {
   @Test
   public void viewWithStarInDef_SelectFieldsInQuery() throws Exception{
     testViewHelper(
-        TEMP_SCHEMA,
+        DFS_TMP_SCHEMA,
         null,
         "SELECT * FROM cp.`region.json` ORDER BY `region_id`",
         "SELECT region_id, sales_city FROM TEST_SCHEMA.TEST_VIEW_NAME LIMIT 2",
@@ -146,7 +157,7 @@ public class TestViewSupport extends TestBaseViewSupport {
   @Test
   public void viewWithSelectFieldsInDef_SelectFieldsInQuery1() throws Exception {
     testViewHelper(
-        TEMP_SCHEMA,
+        DFS_TMP_SCHEMA,
         null,
         "SELECT region_id, sales_city FROM cp.`region.json` ORDER BY `region_id`",
         "SELECT region_id, sales_city FROM TEST_SCHEMA.TEST_VIEW_NAME LIMIT 2",
@@ -161,7 +172,7 @@ public class TestViewSupport extends TestBaseViewSupport {
   @Test
   public void viewWithSelectFieldsInDef_SelectFieldsInQuery2() throws Exception {
     testViewHelper(
-        TEMP_SCHEMA,
+        DFS_TMP_SCHEMA,
         null,
         "SELECT region_id, sales_city FROM cp.`region.json` ORDER BY `region_id`",
         "SELECT sales_city FROM TEST_SCHEMA.TEST_VIEW_NAME LIMIT 2",
@@ -176,7 +187,7 @@ public class TestViewSupport extends TestBaseViewSupport {
   @Test
   public void viewWithSelectFieldsInDef_SelectFieldsInView_SelectFieldsInQuery1() throws Exception {
     testViewHelper(
-        TEMP_SCHEMA,
+        DFS_TMP_SCHEMA,
         "(regionid, salescity)",
         "SELECT region_id, sales_city FROM cp.`region.json` ORDER BY `region_id` LIMIT 2",
         "SELECT regionid, salescity FROM TEST_SCHEMA.TEST_VIEW_NAME LIMIT 2",
@@ -191,7 +202,7 @@ public class TestViewSupport extends TestBaseViewSupport {
   @Test
   public void viewWithSelectFieldsInDef_SelectFieldsInView_SelectFieldsInQuery2() throws Exception {
     testViewHelper(
-        TEMP_SCHEMA,
+        DFS_TMP_SCHEMA,
         "(regionid, salescity)",
         "SELECT region_id, sales_city FROM cp.`region.json` ORDER BY `region_id` DESC",
         "SELECT regionid FROM TEST_SCHEMA.TEST_VIEW_NAME LIMIT 2",
@@ -207,7 +218,7 @@ public class TestViewSupport extends TestBaseViewSupport {
   @Ignore("DRILL-1921")
   public void viewWithUnionWithSelectFieldsInDef_StarInQuery() throws Exception{
     testViewHelper(
-        TEMP_SCHEMA,
+        DFS_TMP_SCHEMA,
         null,
         "SELECT region_id FROM cp.`region.json` UNION SELECT employee_id FROM cp.`employee.json`",
         "SELECT regionid FROM TEST_SCHEMA.TEST_VIEW_NAME LIMIT 2",
@@ -225,20 +236,20 @@ public class TestViewSupport extends TestBaseViewSupport {
     final String outerView = generateViewName();
 
     try {
-      createViewHelper(TEMP_SCHEMA, innerView, TEMP_SCHEMA, null,
+      createViewHelper(DFS_TMP_SCHEMA, innerView, DFS_TMP_SCHEMA, null,
           "SELECT region_id, sales_city FROM cp.`region.json` ORDER BY `region_id`");
 
-      createViewHelper(TEMP_SCHEMA, outerView, TEMP_SCHEMA, null,
-          String.format("SELECT region_id FROM %s.`%s`", TEMP_SCHEMA, innerView));
+      createViewHelper(DFS_TMP_SCHEMA, outerView, DFS_TMP_SCHEMA, null,
+          String.format("SELECT region_id FROM %s.`%s`", DFS_TMP_SCHEMA, innerView));
 
       queryViewHelper(
-          String.format("SELECT region_id FROM %s.`%s` LIMIT 1", TEMP_SCHEMA, outerView),
+          String.format("SELECT region_id FROM %s.`%s` LIMIT 1", DFS_TMP_SCHEMA, outerView),
           new String[] { "region_id" },
           ImmutableList.of(new Object[] { 0L })
       );
     } finally {
-      dropViewHelper(TEMP_SCHEMA, outerView, TEMP_SCHEMA);
-      dropViewHelper(TEMP_SCHEMA, innerView, TEMP_SCHEMA);
+      dropViewHelper(DFS_TMP_SCHEMA, outerView, DFS_TMP_SCHEMA);
+      dropViewHelper(DFS_TMP_SCHEMA, innerView, DFS_TMP_SCHEMA);
     }
   }
 
@@ -250,10 +261,10 @@ public class TestViewSupport extends TestBaseViewSupport {
         "cast(columns[1] AS CHAR(25)) n_name, " +
         "cast(columns[2] AS INT) n_regionkey, " +
         "cast(columns[3] AS VARCHAR(152)) n_comment " +
-        "FROM dfs_test.`[WORKING_PATH]/src/test/resources/nation`";
+        "FROM dfs.`nation`";
 
     testViewHelper(
-        TEMP_SCHEMA,
+        DFS_TMP_SCHEMA,
         null,
         viewDef,
         "SELECT * FROM TEST_SCHEMA.TEST_VIEW_NAME LIMIT 1",
@@ -272,33 +283,31 @@ public class TestViewSupport extends TestBaseViewSupport {
       final String viewDef1 = "SELECT region_id, sales_city FROM cp.`region.json`";
 
       // Create the view
-      createViewHelper(TEMP_SCHEMA, viewName, TEMP_SCHEMA, null, viewDef1);
+      createViewHelper(DFS_TMP_SCHEMA, viewName, DFS_TMP_SCHEMA, null, viewDef1);
 
       // Try to create the view with same name in same schema.
-      final String createViewSql = String.format("CREATE VIEW %s.`%s` AS %s", TEMP_SCHEMA, viewName, viewDef1);
+      final String createViewSql = String.format("CREATE VIEW %s.`%s` AS %s", DFS_TMP_SCHEMA, viewName, viewDef1);
       errorMsgTestHelper(createViewSql,
-          String.format("A view with given name [%s] already exists in schema [%s]", viewName, TEMP_SCHEMA));
+          String.format("A view with given name [%s] already exists in schema [%s]", viewName, DFS_TMP_SCHEMA));
 
       // Try creating the view with same name in same schema, but with CREATE OR REPLACE VIEW clause
       final String viewDef2 = "SELECT sales_state_province FROM cp.`region.json` ORDER BY `region_id`";
-      final String createOrReplaceViewSql = String.format("CREATE OR REPLACE VIEW %s.`%s` AS %s", TEMP_SCHEMA,
-          viewName, viewDef2);
 
       testBuilder()
-          .sqlQuery(createOrReplaceViewSql)
+          .sqlQuery("CREATE OR REPLACE VIEW %s.`%s` AS %s", DFS_TMP_SCHEMA, viewName, viewDef2)
           .unOrdered()
           .baselineColumns("ok", "summary")
           .baselineValues(true,
-              String.format("View '%s' replaced successfully in '%s' schema", viewName, TEMP_SCHEMA))
+              String.format("View '%s' replaced successfully in '%s' schema", viewName, DFS_TMP_SCHEMA))
           .go();
 
       // Make sure the new view created returns the data expected.
-      queryViewHelper(String.format("SELECT * FROM %s.`%s` LIMIT 1", TEMP_SCHEMA, viewName),
+      queryViewHelper(String.format("SELECT * FROM %s.`%s` LIMIT 1", DFS_TMP_SCHEMA, viewName),
           new String[]{"sales_state_province"},
           ImmutableList.of(new Object[]{"None"})
       );
     } finally {
-      dropViewHelper(TEMP_SCHEMA, viewName, TEMP_SCHEMA);
+      dropViewHelper(DFS_TMP_SCHEMA, viewName, DFS_TMP_SCHEMA);
     }
   }
 
@@ -310,19 +319,19 @@ public class TestViewSupport extends TestBaseViewSupport {
     try {
       final String tableDef1 = "SELECT region_id, sales_city FROM cp.`region.json`";
 
-      test(String.format("CREATE TABLE %s.%s as %s", TEMP_SCHEMA, tableName, tableDef1));
+      test("CREATE TABLE %s.%s as %s", DFS_TMP_SCHEMA, tableName, tableDef1);
 
       // Try to create the view with same name in same schema.
-      final String createViewSql = String.format("CREATE VIEW %s.`%s` AS %s", TEMP_SCHEMA, tableName, tableDef1);
+      final String createViewSql = String.format("CREATE VIEW %s.`%s` AS %s", DFS_TMP_SCHEMA, tableName, tableDef1);
       errorMsgTestHelper(createViewSql,
-          String.format("A non-view table with given name [%s] already exists in schema [%s]", tableName, TEMP_SCHEMA));
+          String.format("A non-view table with given name [%s] already exists in schema [%s]", tableName, DFS_TMP_SCHEMA));
 
       // Try creating the view with same name in same schema, but with CREATE OR REPLACE VIEW clause
       final String viewDef2 = "SELECT sales_state_province FROM cp.`region.json` ORDER BY `region_id`";
-      errorMsgTestHelper(String.format("CREATE OR REPLACE VIEW %s.`%s` AS %s", TEMP_SCHEMA, tableName, viewDef2),
-          String.format("A non-view table with given name [%s] already exists in schema [%s]", tableName, TEMP_SCHEMA));
+      errorMsgTestHelper(String.format("CREATE OR REPLACE VIEW %s.`%s` AS %s", DFS_TMP_SCHEMA, tableName, viewDef2),
+          String.format("A non-view table with given name [%s] already exists in schema [%s]", tableName, DFS_TMP_SCHEMA));
     } finally {
-      FileUtils.deleteQuietly(new File(getDfsTestTmpSchemaLocation(), tableName));
+      FileUtils.deleteQuietly(new File(dirTestWatcher.getDfsTestTmpDir(), tableName));
     }
   }
 
@@ -331,44 +340,44 @@ public class TestViewSupport extends TestBaseViewSupport {
     final String viewName = generateViewName();
 
     try {
-      test("USE " + TEMP_SCHEMA);
-      createViewHelper(null /*pass no schema*/, viewName, TEMP_SCHEMA, null,
+      test("USE " + DFS_TMP_SCHEMA);
+      createViewHelper(null /*pass no schema*/, viewName, DFS_TMP_SCHEMA, null,
           "SELECT cast(`employee_id` as integer) employeeid FROM cp.`employee.json`");
 
       // Test SHOW TABLES on view
       testBuilder()
-          .sqlQuery(String.format("SHOW TABLES like '%s'", viewName))
+          .sqlQuery("SHOW TABLES like '%s'", viewName)
           .unOrdered()
           .baselineColumns("TABLE_SCHEMA", "TABLE_NAME")
-          .baselineValues(TEMP_SCHEMA, viewName)
+          .baselineValues(DFS_TMP_SCHEMA, viewName)
           .go();
 
       // Test record in INFORMATION_SCHEMA.VIEWS
       testBuilder()
-          .sqlQuery(String.format("SELECT * FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_NAME = '%s'", viewName))
+          .sqlQuery("SELECT * FROM INFORMATION_SCHEMA.VIEWS WHERE TABLE_NAME = '%s'", viewName)
           .unOrdered()
           .baselineColumns("TABLE_CATALOG", "TABLE_SCHEMA", "TABLE_NAME", "VIEW_DEFINITION")
-          .baselineValues("DRILL", TEMP_SCHEMA, viewName,
-              "SELECT CAST(`employee_id` AS INTEGER) AS `employeeid`\n" + "FROM `cp`.`employee.json`")
+          .baselineValues("DRILL", DFS_TMP_SCHEMA, viewName,
+              "SELECT CAST(`employee_id` AS INTEGER) AS `employeeid`\nFROM `cp`.`employee.json`")
           .go();
 
       // Test record in INFORMATION_SCHEMA.TABLES
       testBuilder()
-          .sqlQuery(String.format("SELECT * FROM INFORMATION_SCHEMA.`TABLES` WHERE TABLE_NAME = '%s'", viewName))
+          .sqlQuery("SELECT * FROM INFORMATION_SCHEMA.`TABLES` WHERE TABLE_NAME = '%s'", viewName)
           .unOrdered()
           .baselineColumns("TABLE_CATALOG", "TABLE_SCHEMA", "TABLE_NAME", "TABLE_TYPE")
-          .baselineValues("DRILL", TEMP_SCHEMA, viewName, "VIEW")
+          .baselineValues("DRILL", DFS_TMP_SCHEMA, viewName, "VIEW")
           .go();
 
       // Test DESCRIBE view
       testBuilder()
-          .sqlQuery(String.format("DESCRIBE `%s`", viewName))
+          .sqlQuery("DESCRIBE `%s`", viewName)
           .unOrdered()
           .baselineColumns("COLUMN_NAME", "DATA_TYPE", "IS_NULLABLE")
           .baselineValues("employeeid", "INTEGER", "YES")
           .go();
     } finally {
-      dropViewHelper(TEMP_SCHEMA, viewName, TEMP_SCHEMA);
+      dropViewHelper(DFS_TMP_SCHEMA, viewName, DFS_TMP_SCHEMA);
     }
   }
 
@@ -377,35 +386,35 @@ public class TestViewSupport extends TestBaseViewSupport {
     final String viewName = generateViewName();
 
     try {
-      // Change default schema to just "dfs_test". View is actually created in "dfs_test.tmp" schema.
-      test("USE dfs_test");
+      // Change default schema to just "dfs". View is actually created in "dfs.tmp" schema.
+      test("USE dfs");
 
       // Create a view with with "tmp" schema identifier
-      createViewHelper("tmp", viewName, TEMP_SCHEMA, null,
+      createViewHelper("tmp", viewName, DFS_TMP_SCHEMA, null,
           "SELECT CAST(`employee_id` AS INTEGER) AS `employeeid`\n" + "FROM `cp`.`employee.json`");
 
       final String[] baselineColumns = new String[] { "employeeid" };
       final List<Object[]> baselineValues = ImmutableList.of(new Object[] { 1156 });
 
-      // Query view from current schema "dfs_test" by referring to the view using "tmp.viewName"
+      // Query view from current schema "dfs" by referring to the view using "tmp.viewName"
       queryViewHelper(
           String.format("SELECT * FROM %s.`%s` ORDER BY `employeeid` DESC LIMIT 1", "tmp", viewName),
           baselineColumns, baselineValues);
 
-      // Change the default schema to "dfs_test.tmp" and query view by referring to it using "viewName"
-      test("USE dfs_test.tmp");
+      // Change the default schema to "dfs.tmp" and query view by referring to it using "viewName"
+      test("USE dfs.tmp");
       queryViewHelper(
           String.format("SELECT * FROM `%s` ORDER BY `employeeid` DESC LIMIT 1", viewName),
           baselineColumns, baselineValues);
 
-      // Change the default schema to "cp" and query view by referring to it using "dfs_test.tmp.viewName";
+      // Change the default schema to "cp" and query view by referring to it using "dfs.tmp.viewName";
       test("USE cp");
       queryViewHelper(
-          String.format("SELECT * FROM %s.`%s` ORDER BY `employeeid` DESC LIMIT 1", "dfs_test.tmp", viewName),
+          String.format("SELECT * FROM %s.`%s` ORDER BY `employeeid` DESC LIMIT 1", "dfs.tmp", viewName),
           baselineColumns, baselineValues);
 
     } finally {
-      dropViewHelper(TEMP_SCHEMA, viewName, TEMP_SCHEMA);
+      dropViewHelper(DFS_TMP_SCHEMA, viewName, DFS_TMP_SCHEMA);
     }
   }
 
@@ -419,24 +428,24 @@ public class TestViewSupport extends TestBaseViewSupport {
       test("USE cp");
 
       // Create a view with full schema identifier and refer the "region.json" as without schema.
-      createViewHelper(TEMP_SCHEMA, viewName, TEMP_SCHEMA, null, "SELECT region_id, sales_city FROM `region.json`");
+      createViewHelper(DFS_TMP_SCHEMA, viewName, DFS_TMP_SCHEMA, null, "SELECT region_id, sales_city FROM `region.json`");
 
       final String[] baselineColumns = new String[] { "region_id", "sales_city" };
       final List<Object[]> baselineValues = ImmutableList.of(new Object[]{109L, "Santa Fe"});
 
       // Query the view
       queryViewHelper(
-          String.format("SELECT * FROM %s.`%s` ORDER BY region_id DESC LIMIT 1", "dfs_test.tmp", viewName),
+          String.format("SELECT * FROM %s.`%s` ORDER BY region_id DESC LIMIT 1", DFS_TMP_SCHEMA, viewName),
           baselineColumns, baselineValues);
 
-      // Change default schema to "dfs_test" and query by referring to the view using "tmp.viewName"
-      test("USE dfs_test");
+      // Change default schema to "dfs" and query by referring to the view using "tmp.viewName"
+      test("USE dfs");
       queryViewHelper(
-          String.format("SELECT * FROM %s.`%s` ORDER BY region_id DESC LIMIT 1", "tmp", viewName),
+          String.format("SELECT * FROM %s.`%s` ORDER BY region_id DESC LIMIT 1", TMP_SCHEMA, viewName),
           baselineColumns, baselineValues);
 
     } finally {
-      dropViewHelper(TEMP_SCHEMA, viewName, TEMP_SCHEMA);
+      dropViewHelper(DFS_TMP_SCHEMA, viewName, DFS_TMP_SCHEMA);
     }
   }
 
@@ -448,8 +457,8 @@ public class TestViewSupport extends TestBaseViewSupport {
     final String viewName = generateViewName();
 
     try {
-      test("USE " + TEMP_SCHEMA);
-      createViewHelper(null, viewName, TEMP_SCHEMA, "(id, name, bday)",
+      test("use %s", DFS_TMP_SCHEMA);
+      createViewHelper(null, viewName, DFS_TMP_SCHEMA, "(id, name, bday)",
           "SELECT " +
               "cast(`region_id` as integer), " +
               "cast(`full_name` as varchar(100)), " +
@@ -458,7 +467,7 @@ public class TestViewSupport extends TestBaseViewSupport {
 
       // Test DESCRIBE view
       testBuilder()
-          .sqlQuery(String.format("DESCRIBE `%s`", viewName))
+          .sqlQuery("DESCRIBE `%s`", viewName)
           .unOrdered()
           .baselineColumns("COLUMN_NAME", "DATA_TYPE", "IS_NULLABLE")
           .baselineValues("id", "INTEGER", "YES")
@@ -466,7 +475,7 @@ public class TestViewSupport extends TestBaseViewSupport {
           .baselineValues("bday", "DATE", "YES")
           .go();
     } finally {
-      dropViewHelper(TEMP_SCHEMA, viewName, TEMP_SCHEMA);
+      dropViewHelper(DFS_TMP_SCHEMA, viewName, DFS_TMP_SCHEMA);
     }
   }
 
@@ -531,7 +540,7 @@ public class TestViewSupport extends TestBaseViewSupport {
   @Category(UnlikelyTest.class)
   public void createViewWithUniqueColsInFieldListDuplicateColsInQuery1() throws Exception {
     testViewHelper(
-        TEMP_SCHEMA,
+        DFS_TMP_SCHEMA,
         "(regionid1, regionid2)",
         "SELECT region_id, region_id FROM cp.`region.json` LIMIT 1",
         "SELECT * FROM TEST_SCHEMA.TEST_VIEW_NAME",
@@ -546,7 +555,7 @@ public class TestViewSupport extends TestBaseViewSupport {
   @Category(UnlikelyTest.class)
   public void createViewWithUniqueColsInFieldListDuplicateColsInQuery2() throws Exception {
     testViewHelper(
-        TEMP_SCHEMA,
+        DFS_TMP_SCHEMA,
         "(regionid1, regionid2)",
         "SELECT t1.region_id, t2.region_id FROM cp.`region.json` t1 JOIN cp.`region.json` t2 " +
             "ON t1.region_id = t2.region_id LIMIT 1",
@@ -578,15 +587,15 @@ public class TestViewSupport extends TestBaseViewSupport {
   }
 
   private static void createViewErrorTestHelper(final String viewSql, final String expErrorMsg) throws Exception {
-    final String createViewSql = String.format(viewSql, TEMP_SCHEMA, "duplicateColumnsInViewDef");
+    final String createViewSql = String.format(viewSql, DFS_TMP_SCHEMA, "duplicateColumnsInViewDef");
     errorMsgTestHelper(createViewSql, expErrorMsg);
   }
 
   @Test // DRILL-2423
   @Category(UnlikelyTest.class)
   public void showProperMsgWhenDroppingNonExistentView() throws Exception{
-    errorMsgTestHelper("DROP VIEW dfs_test.tmp.nonExistentView",
-        "Unknown view [nonExistentView] in schema [dfs_test.tmp].");
+    errorMsgTestHelper("DROP VIEW dfs.tmp.nonExistentView",
+        "Unknown view [nonExistentView] in schema [dfs.tmp].");
   }
 
   @Test // DRILL-2423
@@ -601,13 +610,13 @@ public class TestViewSupport extends TestBaseViewSupport {
   public void showProperMsgWhenTryingToDropANonViewTable() throws Exception{
     final String testTableName = "testTableShowErrorMsg";
     try {
-      test(String.format("CREATE TABLE %s.%s AS SELECT c_custkey, c_nationkey from cp.`tpch/customer.parquet`",
-          TEMP_SCHEMA, testTableName));
+      test("CREATE TABLE %s.%s AS SELECT c_custkey, c_nationkey from cp.`tpch/customer.parquet`",
+        DFS_TMP_SCHEMA, testTableName);
 
-      errorMsgTestHelper(String.format("DROP VIEW %s.%s", TEMP_SCHEMA, testTableName),
-          "[testTableShowErrorMsg] is not a VIEW in schema [dfs_test.tmp]");
+      errorMsgTestHelper(String.format("DROP VIEW %s.%s", DFS_TMP_SCHEMA, testTableName),
+          "[testTableShowErrorMsg] is not a VIEW in schema [dfs.tmp]");
     } finally {
-      File tblPath = new File(getDfsTestTmpSchemaLocation(), testTableName);
+      File tblPath = new File(dirTestWatcher.getDfsTestTmpDir(), testTableName);
       FileUtils.deleteQuietly(tblPath);
     }
   }
@@ -617,9 +626,9 @@ public class TestViewSupport extends TestBaseViewSupport {
     final String existentViewName = generateViewName();
 
     // successful dropping of existent view
-    createViewHelper(TEMP_SCHEMA, existentViewName, TEMP_SCHEMA, null,
+    createViewHelper(DFS_TMP_SCHEMA, existentViewName, DFS_TMP_SCHEMA, null,
         "SELECT c_custkey, c_nationkey from cp.`tpch/customer.parquet`");
-    dropViewIfExistsHelper(TEMP_SCHEMA, existentViewName, TEMP_SCHEMA, true);
+    dropViewIfExistsHelper(DFS_TMP_SCHEMA, existentViewName, DFS_TMP_SCHEMA, true);
   }
 
   @Test // DRILL-4673
@@ -627,7 +636,7 @@ public class TestViewSupport extends TestBaseViewSupport {
     final String nonExistentViewName = generateViewName();
 
     // dropping of non existent view without error
-    dropViewIfExistsHelper(TEMP_SCHEMA, nonExistentViewName, TEMP_SCHEMA, false);
+    dropViewIfExistsHelper(DFS_TMP_SCHEMA, nonExistentViewName, DFS_TMP_SCHEMA, false);
   }
 
   @Test // DRILL-4673
@@ -635,11 +644,11 @@ public class TestViewSupport extends TestBaseViewSupport {
     final String tableName = "table_name";
     try{
       // dropping of non existent view without error if the table with such name is existed
-      test(String.format("CREATE TABLE %s.%s as SELECT region_id, sales_city FROM cp.`region.json`",
-          TEMP_SCHEMA, tableName));
-      dropViewIfExistsHelper(TEMP_SCHEMA, tableName, TEMP_SCHEMA, false);
+      test("CREATE TABLE %s.%s as SELECT region_id, sales_city FROM cp.`region.json`",
+        DFS_TMP_SCHEMA, tableName);
+      dropViewIfExistsHelper(DFS_TMP_SCHEMA, tableName, DFS_TMP_SCHEMA, false);
     } finally {
-      test(String.format("DROP TABLE IF EXISTS %s.%s ", TEMP_SCHEMA, tableName));
+      test("DROP TABLE IF EXISTS %s.%s ", DFS_TMP_SCHEMA, tableName);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestWithClause.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestWithClause.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestWithClause.java
index b901120..51821e9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestWithClause.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestWithClause.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.sql;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlTest;
 import org.junit.Ignore;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
index d1a16df..324fc73 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
@@ -19,30 +19,31 @@ package org.apache.drill.exec.store;
 
 import com.google.common.base.Charsets;
 import com.google.common.io.Files;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.common.types.TypeProtos;
-import org.apache.drill.common.util.FileUtils;
-import org.apache.drill.common.util.TestTools;
 import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.exec.util.JsonStringArrayList;
 import org.apache.drill.exec.util.Text;
 import org.apache.drill.test.rowSet.SchemaBuilder;
-import org.apache.hadoop.fs.Path;
-import org.junit.Before;
-import org.junit.Rule;
+import org.junit.BeforeClass;
 import org.junit.Test;
-import org.junit.rules.TemporaryFolder;
 
 import java.io.File;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 
 public class TestImplicitFileColumns extends BaseTestQuery {
-
+  public static final String CSV = "csv";
   public static final String MAIN = "main";
   public static final String NESTED = "nested";
-  public static final String CSV = "csv";
-  public final String JSON_TBL = "/scan/jsonTbl"; // 1990/1.json : {id:100, name: "John"}, 1991/2.json : {id: 1000, name : "Joe"}
-  public final String PARQUET_TBL = "/multilevel/parquet/";  // 1990/Q1/orders_1990_q1.parquet, ...
-  public final String CSV_TBL = "/multilevel/csv";  // 1990/Q1/orders_1990_q1.csv, ..
+  public static final String MAIN_FILE = MAIN + "." + CSV;
+  public static final String NESTED_FILE = NESTED + "." + CSV;
+  public static final Path FILES = Paths.get("files");
+  public static final Path NESTED_DIR = FILES.resolve(NESTED);
+  public static final Path JSON_TBL = Paths.get("scan", "jsonTbl"); // 1990/1.json : {id:100, name: "John"}, 1991/2.json : {id: 1000, name : "Joe"}
+  public static final Path PARQUET_TBL = Paths.get("multilevel", "parquet");  // 1990/Q1/orders_1990_q1.parquet, ...
+  public static final Path PARQUET_CHANGE_TBL = Paths.get("multilevel", "parquetWithSchemaChange");
+  public static final Path CSV_TBL = Paths.get("multilevel", "csv");  // 1990/Q1/orders_1990_q1.csv, ..
 
   private static final JsonStringArrayList<Text> mainColumnValues = new JsonStringArrayList<Text>() {{
     add(new Text(MAIN));
@@ -51,37 +52,40 @@ public class TestImplicitFileColumns extends BaseTestQuery {
     add(new Text(NESTED));
   }};
 
-  @Rule
-  public TemporaryFolder testFolder = new TemporaryFolder();
+  private static File mainFile;
+  private static File nestedFile;
 
-  private File mainFile;
-  private File nestedFolder;
-  private File nestedFile;
-
-  @Before
-  public void setup() throws Exception {
-    mainFile = testFolder.newFile(MAIN + "." + CSV);
+  @BeforeClass
+  public static void setup() throws Exception {
+    File files = dirTestWatcher.makeRootSubDir(FILES);
+    mainFile = new File(files, MAIN_FILE);
     Files.write(MAIN, mainFile, Charsets.UTF_8);
-    nestedFolder = testFolder.newFolder(NESTED);
-    nestedFile = new File(nestedFolder, NESTED + "." + CSV);
+    File nestedFolder = new File(files, NESTED);
+    nestedFolder.mkdirs();
+    nestedFile = new File(nestedFolder, NESTED_FILE);
     Files.write(NESTED, nestedFile, Charsets.UTF_8);
+
+    dirTestWatcher.copyResourceToRoot(JSON_TBL);
+    dirTestWatcher.copyResourceToRoot(PARQUET_TBL);
+    dirTestWatcher.copyResourceToRoot(CSV_TBL);
+    dirTestWatcher.copyResourceToRoot(PARQUET_CHANGE_TBL);
   }
 
   @Test
   public void testImplicitColumns() throws Exception {
     testBuilder()
-        .sqlQuery("select *, filename, suffix, fqn, filepath from dfs.`%s` order by filename", testFolder.getRoot().getPath())
+        .sqlQuery("select *, filename, suffix, fqn, filepath from dfs.`%s` order by filename", FILES)
         .ordered()
         .baselineColumns("columns", "dir0", "filename", "suffix", "fqn", "filepath")
-        .baselineValues(mainColumnValues, null, mainFile.getName(), CSV, new Path(mainFile.getPath()).toString(), new Path(mainFile.getParent()).toString())
-        .baselineValues(nestedColumnValues, NESTED, nestedFile.getName(), CSV, new Path(nestedFile.getPath()).toString(), new Path(nestedFile.getParent()).toString())
+        .baselineValues(mainColumnValues, null, mainFile.getName(), CSV, mainFile.getCanonicalPath(), mainFile.getParentFile().getCanonicalPath())
+        .baselineValues(nestedColumnValues, NESTED, NESTED_FILE, CSV, nestedFile.getCanonicalPath(), nestedFile.getParentFile().getCanonicalPath())
         .go();
   }
 
   @Test
   public void testImplicitColumnInWhereClause() throws Exception {
     testBuilder()
-        .sqlQuery("select * from dfs.`%s` where filename = '%s'", nestedFolder.getPath(), nestedFile.getName())
+        .sqlQuery("select * from dfs.`%s` where filename = '%s'", NESTED_DIR, NESTED_FILE)
         .unOrdered()
         .baselineColumns("columns")
         .baselineValues(nestedColumnValues)
@@ -91,27 +95,27 @@ public class TestImplicitFileColumns extends BaseTestQuery {
   @Test
   public void testImplicitColumnAlone() throws Exception {
     testBuilder()
-        .sqlQuery("select filename from dfs.`%s`", nestedFolder.getPath())
+        .sqlQuery("select filename from dfs.`%s`", NESTED_DIR)
         .unOrdered()
         .baselineColumns("filename")
-        .baselineValues(nestedFile.getName())
+        .baselineValues(NESTED_FILE)
         .go();
   }
 
   @Test
   public void testImplicitColumnWithTableColumns() throws Exception {
     testBuilder()
-        .sqlQuery("select columns, filename from dfs.`%s`", nestedFolder.getPath())
+        .sqlQuery("select columns, filename from dfs.`%s`", NESTED_DIR)
         .unOrdered()
         .baselineColumns("columns", "filename")
-        .baselineValues(nestedColumnValues, nestedFile.getName())
+        .baselineValues(nestedColumnValues, NESTED_FILE)
         .go();
   }
 
   @Test
   public void testCountStarWithImplicitColumnsInWhereClause() throws Exception {
     testBuilder()
-        .sqlQuery("select count(*) as cnt from dfs.`%s` where filename = '%s'", nestedFolder.getPath(), nestedFile.getName())
+        .sqlQuery("select count(*) as cnt from dfs.`%s` where filename = '%s'", NESTED_DIR, NESTED_FILE)
         .unOrdered()
         .baselineColumns("cnt")
         .baselineValues(1L)
@@ -121,10 +125,11 @@ public class TestImplicitFileColumns extends BaseTestQuery {
   @Test
   public void testImplicitAndPartitionColumnsInSelectClause() throws Exception {
     testBuilder()
-        .sqlQuery("select dir0, filename from dfs.`%s` order by filename", testFolder.getRoot().getPath()).ordered()
+        .sqlQuery("select dir0, filename from dfs.`%s` order by filename", FILES)
+        .ordered()
         .baselineColumns("dir0", "filename")
-        .baselineValues(null, mainFile.getName())
-        .baselineValues(NESTED, nestedFile.getName())
+        .baselineValues(null, MAIN_FILE)
+        .baselineValues(NESTED, NESTED_FILE)
         .go();
   }
 
@@ -143,8 +148,7 @@ public class TestImplicitFileColumns extends BaseTestQuery {
     try {
       test("alter session set `planner.enable_decimal_data_type` = true");
       testBuilder()
-          .sqlQuery(String.format("select max(dir0) as max_dir from dfs_test.`%s/src/test/resources/multilevel/parquetWithSchemaChange`",
-              TestTools.getWorkingPath()))
+          .sqlQuery("select max(dir0) as max_dir from dfs.`%s`", PARQUET_CHANGE_TBL)
           .unOrdered()
           .baselineColumns("max_dir")
           .baselineValues("voter50")
@@ -156,9 +160,6 @@ public class TestImplicitFileColumns extends BaseTestQuery {
 
   @Test
   public void testStarColumnJson() throws Exception {
-    final String rootEmpty = FileUtils.getResourceAsFile(JSON_TBL).toURI().toString();
-    final String query1 = String.format("select * from dfs_test.`%s` ", rootEmpty);
-
     final BatchSchema expectedSchema = new SchemaBuilder()
         .addNullable("dir0", TypeProtos.MinorType.VARCHAR)
         .addNullable("id", TypeProtos.MinorType.BIGINT)
@@ -166,7 +167,7 @@ public class TestImplicitFileColumns extends BaseTestQuery {
         .build();
 
     testBuilder()
-        .sqlQuery(query1)
+        .sqlQuery("select * from dfs.`%s` ", JSON_TBL)
         .schemaBaseLine(expectedSchema)
         .build()
         .run();
@@ -174,9 +175,6 @@ public class TestImplicitFileColumns extends BaseTestQuery {
 
   @Test
   public void testStarColumnParquet() throws Exception {
-    final String rootEmpty = FileUtils.getResourceAsFile(PARQUET_TBL).toURI().toString();
-    final String query1 = String.format("select * from dfs_test.`%s` ", rootEmpty);
-
     final BatchSchema expectedSchema = new SchemaBuilder()
         .addNullable("dir0", TypeProtos.MinorType.VARCHAR)
         .addNullable("dir1", TypeProtos.MinorType.VARCHAR)
@@ -192,7 +190,7 @@ public class TestImplicitFileColumns extends BaseTestQuery {
         .build();
 
     testBuilder()
-        .sqlQuery(query1)
+        .sqlQuery("select * from dfs.`%s` ", PARQUET_TBL)
         .schemaBaseLine(expectedSchema)
         .build()
         .run();
@@ -200,9 +198,6 @@ public class TestImplicitFileColumns extends BaseTestQuery {
 
   @Test
   public void testStarColumnCsv() throws Exception {
-    final String rootEmpty = FileUtils.getResourceAsFile(CSV_TBL).toURI().toString();
-    final String query1 = String.format("select * from dfs_test.`%s` ", rootEmpty);
-
     final BatchSchema expectedSchema = new SchemaBuilder()
         .addNullable("dir0", TypeProtos.MinorType.VARCHAR)
         .addNullable("dir1", TypeProtos.MinorType.VARCHAR)
@@ -210,10 +205,9 @@ public class TestImplicitFileColumns extends BaseTestQuery {
         .build();
 
     testBuilder()
-        .sqlQuery(query1)
+        .sqlQuery("select * from dfs.`%s` ", CSV_TBL)
         .schemaBaseLine(expectedSchema)
         .build()
         .run();
   }
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestTimedRunnable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestTimedRunnable.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestTimedRunnable.java
index 4848a9d..3e6f118 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestTimedRunnable.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestTimedRunnable.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.store;
 
 import com.google.common.collect.Lists;
 import org.apache.drill.common.exceptions.UserException;
-import org.apache.drill.common.util.TestTools;
+import org.apache.drill.test.TestTools;
 import org.apache.drill.test.DrillTest;
 import org.apache.drill.categories.SlowTest;
 import org.junit.Rule;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroFormatTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroFormatTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroFormatTest.java
index f804e88..6436c1c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroFormatTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroFormatTest.java
@@ -18,8 +18,8 @@
 package org.apache.drill.exec.store.avro;
 
 import com.google.common.collect.Lists;
-import org.apache.drill.BaseTestQuery;
-import org.apache.drill.TestBuilder;
+import org.apache.drill.test.BaseTestQuery;
+import org.apache.drill.test.TestBuilder;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.exceptions.UserRemoteException;
 import org.apache.drill.exec.util.JsonStringHashMap;
@@ -29,7 +29,21 @@ import org.junit.Test;
 import java.util.List;
 import java.util.Map;
 
-import static org.apache.drill.TestBuilder.listOf;
+import static org.apache.drill.exec.store.avro.AvroTestUtil.generateDoubleNestedSchema_NoNullValues;
+import static org.apache.drill.exec.store.avro.AvroTestUtil.generateLinkedList;
+import static org.apache.drill.exec.store.avro.AvroTestUtil.generateMapSchemaComplex_withNullValues;
+import static org.apache.drill.exec.store.avro.AvroTestUtil.generateMapSchema_withNullValues;
+import static org.apache.drill.exec.store.avro.AvroTestUtil.generateNestedArraySchema;
+import static org.apache.drill.exec.store.avro.AvroTestUtil.generateSimpleArraySchema_NoNullValues;
+import static org.apache.drill.exec.store.avro.AvroTestUtil.generateSimpleEnumSchema_NoNullValues;
+import static org.apache.drill.exec.store.avro.AvroTestUtil.generateSimpleNestedSchema_NoNullValues;
+import static org.apache.drill.exec.store.avro.AvroTestUtil.generateSimplePrimitiveSchema_NoNullValues;
+import static org.apache.drill.exec.store.avro.AvroTestUtil.generateStringAndUtf8Data;
+import static org.apache.drill.exec.store.avro.AvroTestUtil.generateUnionNestedArraySchema_withNullValues;
+import static org.apache.drill.exec.store.avro.AvroTestUtil.generateUnionNestedSchema_withNullValues;
+import static org.apache.drill.exec.store.avro.AvroTestUtil.generateUnionSchema_WithNonNullValues;
+import static org.apache.drill.exec.store.avro.AvroTestUtil.generateUnionSchema_WithNullValues;
+import static org.apache.drill.test.TestBuilder.listOf;
 
 /**
  * Unit tests for Avro record reader.
@@ -42,15 +56,12 @@ public class AvroFormatTest extends BaseTestQuery {
 
   @Test
   public void testBatchCutoff() throws Exception {
-
-    final AvroTestUtil.AvroTestRecordWriter testSetup = AvroTestUtil.generateSimplePrimitiveSchema_NoNullValues(5000);
-    final String file = testSetup.getFilePath();
-    final String sql =
-        "select a_string, b_int, c_long, d_float, e_double, f_bytes, h_boolean, g_null " +
-            "from dfs_test.`" + file + "`";
-    test(sql);
+    final AvroTestUtil.AvroTestRecordWriter testSetup = generateSimplePrimitiveSchema_NoNullValues(5000);
+    final String file = testSetup.getFileName();
+    final String sql = "select a_string, b_int, c_long, d_float, e_double, f_bytes, h_boolean, g_null from dfs.`%s`";
+    test(sql, file);
     testBuilder()
-        .sqlQuery(sql)
+        .sqlQuery(sql, file)
         .unOrdered()
         .expectsNumBatches(2)
         .baselineRecords(testSetup.getExpectedRecords())
@@ -67,24 +78,18 @@ public class AvroFormatTest extends BaseTestQuery {
    */
   @Test
   public void testFiltersOnVarchar() throws Exception {
-
-    final AvroTestUtil.AvroTestRecordWriter testSetup = AvroTestUtil.generateSimplePrimitiveSchema_NoNullValues(5000);
-    final String file = testSetup.getFilePath();
-    final String sql =
-        "select a_string " +
-            "from dfs_test.`" + file + "` where a_string = 'a_1'";
+    final String file = generateSimplePrimitiveSchema_NoNullValues(5000).getFileName();
+    final String sql = "select a_string from dfs.`%s` where a_string = 'a_1'";
     testBuilder()
-        .sqlQuery(sql)
+        .sqlQuery(sql, file)
         .unOrdered()
         .baselineColumns("a_string")
         .baselineValues("a_1")
         .go();
 
-    final String sql2 =
-        "select a_string " +
-            "from dfs_test.`" + file + "` where a_string IN ('a_1')";
+    final String sql2 = "select a_string from dfs.`%s` where a_string IN ('a_1')";
     testBuilder()
-        .sqlQuery(sql2)
+        .sqlQuery(sql2, file)
         .unOrdered()
         .baselineColumns("a_string")
         .baselineValues("a_1")
@@ -93,13 +98,10 @@ public class AvroFormatTest extends BaseTestQuery {
 
   @Test
   public void testFiltersOnVarBinary() throws Exception {
-    final AvroTestUtil.AvroTestRecordWriter testSetup = AvroTestUtil.generateSimplePrimitiveSchema_NoNullValues(5000);
-    final String file = testSetup.getFilePath();
-    final String sql =
-        "select f_bytes " +
-            "from dfs_test.`" + file + "` where f_bytes = BINARY_STRING('\\x61\\x31')";
+    final String file = generateSimplePrimitiveSchema_NoNullValues(5000).getFileName();
+    final String sql = "select f_bytes from dfs.`%s` where f_bytes = BINARY_STRING('\\x61\\x31')";
     TestBuilder testBuilder = testBuilder()
-        .sqlQuery(sql)
+        .sqlQuery(sql, file)
         .unOrdered()
         .baselineColumns("f_bytes");
 
@@ -108,11 +110,9 @@ public class AvroFormatTest extends BaseTestQuery {
     }
     testBuilder.go();
 
-    final String sql2 =
-        "select f_bytes " +
-            "from dfs_test.`" + file + "` where f_bytes IN (BINARY_STRING('\\x61\\x31'))";
+    final String sql2 = "select f_bytes from dfs.`%s` where f_bytes IN (BINARY_STRING('\\x61\\x31'))";
     testBuilder = testBuilder()
-        .sqlQuery(sql2)
+        .sqlQuery(sql2, file)
         .unOrdered()
         .baselineColumns("f_bytes");
 
@@ -124,15 +124,12 @@ public class AvroFormatTest extends BaseTestQuery {
 
   @Test
   public void testSimplePrimitiveSchema_NoNullValues() throws Exception {
-
-    final AvroTestUtil.AvroTestRecordWriter testSetup = AvroTestUtil.generateSimplePrimitiveSchema_NoNullValues();
-    final String file = testSetup.getFilePath();
-    final String sql =
-            "select a_string, b_int, c_long, d_float, e_double, f_bytes, h_boolean, g_null " +
-             "from dfs_test.`" + file + "`";
-    test(sql);
+    final AvroTestUtil.AvroTestRecordWriter testSetup = generateSimplePrimitiveSchema_NoNullValues();
+    final String file = testSetup.getFileName();
+    final String sql = "select a_string, b_int, c_long, d_float, e_double, f_bytes, h_boolean, g_null from dfs.`%s`";
+    test(sql, file);
     testBuilder()
-        .sqlQuery(sql)
+        .sqlQuery(sql, file)
         .unOrdered()
         .baselineRecords(testSetup.getExpectedRecords())
         .go();
@@ -140,7 +137,7 @@ public class AvroFormatTest extends BaseTestQuery {
 
   @Test
   public void testSimplePrimitiveSchema_StarQuery() throws Exception {
-    simpleAvroTestHelper(AvroTestUtil.generateSimplePrimitiveSchema_NoNullValues(), "select * from dfs_test.`%s`");
+    simpleAvroTestHelper(generateSimplePrimitiveSchema_NoNullValues(), "select * from dfs.`%s`");
   }
 
   private List<Map<String, Object>> project(
@@ -161,13 +158,11 @@ public class AvroFormatTest extends BaseTestQuery {
 
   @Test
   public void testSimplePrimitiveSchema_SelectColumnSubset() throws Exception {
-
-    final AvroTestUtil.AvroTestRecordWriter testSetup = AvroTestUtil.generateSimplePrimitiveSchema_NoNullValues();
-    final String file = testSetup.getFilePath();
-    final String sql = "select h_boolean, e_double from dfs_test.`" + file + "`";
+    final AvroTestUtil.AvroTestRecordWriter testSetup = generateSimplePrimitiveSchema_NoNullValues();
+    final String file = testSetup.getFileName();
     List<String> projectList = Lists.newArrayList("`h_boolean`", "`e_double`");
     testBuilder()
-        .sqlQuery(sql)
+        .sqlQuery("select h_boolean, e_double from dfs.`%s`", file)
         .unOrdered()
         .baselineRecords(project(testSetup.getExpectedRecords(), projectList))
         .go();
@@ -175,11 +170,9 @@ public class AvroFormatTest extends BaseTestQuery {
 
   @Test
   public void testSimplePrimitiveSchema_NoColumnsExistInTheSchema() throws Exception {
-
-    final String file = AvroTestUtil.generateSimplePrimitiveSchema_NoNullValues().getFilePath();
-    final String sql = "select h_dummy1, e_dummy2 from dfs_test.`" + file + "`";
+    final String file = generateSimplePrimitiveSchema_NoNullValues().getFileName();
     try {
-      test(sql);
+      test("select h_dummy1, e_dummy2 from dfs.`%s`", file);
       Assert.fail("Test should fail as h_dummy1 and e_dummy2 does not exist.");
     } catch(UserException ue) {
       Assert.assertTrue("Test should fail as h_dummy1 and e_dummy2 does not exist.",
@@ -189,11 +182,9 @@ public class AvroFormatTest extends BaseTestQuery {
 
   @Test
   public void testSimplePrimitiveSchema_OneExistAndOneDoesNotExistInTheSchema() throws Exception {
-
-    final String file = AvroTestUtil.generateSimplePrimitiveSchema_NoNullValues().getFilePath();
-    final String sql = "select h_boolean, e_dummy2 from dfs_test.`" + file + "`";
+    final String file = generateSimplePrimitiveSchema_NoNullValues().getFileName();
     try {
-      test(sql);
+      test("select h_boolean, e_dummy2 from dfs.`%s`", file);
       Assert.fail("Test should fail as e_dummy2 does not exist.");
     } catch(UserException ue) {
       Assert.assertTrue("Test should fail as e_dummy2 does not exist.", true);
@@ -202,80 +193,68 @@ public class AvroFormatTest extends BaseTestQuery {
 
   @Test
   public void testSimpleArraySchema_NoNullValues() throws Exception {
-    final String file = AvroTestUtil.generateSimpleArraySchema_NoNullValues().getFilePath();
-    final String sql = "select a_string, c_string_array[0], e_float_array[2] " +
-            "from dfs_test.`" + file + "`";
-    test(sql);
+    final String file = generateSimpleArraySchema_NoNullValues().getFileName();
+    final String sql = "select a_string, c_string_array[0], e_float_array[2] from dfs.`%s`";
+    test(sql, file);
   }
 
   @Test
   public void testSimpleArraySchema_StarQuery() throws Exception {
-    simpleAvroTestHelper(AvroTestUtil.generateSimpleArraySchema_NoNullValues(), "select * from dfs_test.`%s`");
+    simpleAvroTestHelper(generateSimpleArraySchema_NoNullValues(), "select * from dfs.`%s`");
   }
 
   @Test
   public void testDoubleNestedSchema_NoNullValues_NotAllColumnsProjected() throws Exception {
-    final String file = AvroTestUtil.generateDoubleNestedSchema_NoNullValues().getFilePath();
-    final String sql = "select t.c_record.nested_1_int, " +
-            "t.c_record.nested_1_record.double_nested_1_int " +
-            "from dfs_test.`" + file + "` t";
-    test(sql);
+    final String file = generateDoubleNestedSchema_NoNullValues().getFileName();
+    final String sql = "select t.c_record.nested_1_int, t.c_record.nested_1_record.double_nested_1_int from dfs.`%s` t";
+    test(sql, file);
   }
 
   @Test
   public void testSimpleNestedSchema_NoNullValues() throws Exception {
-
-    final AvroTestUtil.AvroTestRecordWriter testSetup = AvroTestUtil.generateSimpleNestedSchema_NoNullValues();
-    final String file = testSetup.getFilePath();
-    final String sql = "select a_string, b_int, t.c_record.nested_1_string, t.c_record.nested_1_int " +
-        "from dfs_test.`" + file + "` t";
-    test(sql);
+    final String file = generateSimpleNestedSchema_NoNullValues().getFileName();
+    final String sql = "select a_string, b_int, t.c_record.nested_1_string, t.c_record.nested_1_int from dfs.`%s` t";
+    test(sql, file);
   }
 
   @Test
   public void testSimpleNestedSchema_StarQuery() throws Exception {
-
-    final AvroTestUtil.AvroTestRecordWriter testSetup = AvroTestUtil.generateSimpleNestedSchema_NoNullValues();
-    final String file = testSetup.getFilePath();
-    final String sql = "select * from dfs_test.`" + file + "`";
+    final AvroTestUtil.AvroTestRecordWriter testSetup = generateSimpleNestedSchema_NoNullValues();
+    final String file = testSetup.getFileName();
     testBuilder()
-        .sqlQuery(sql)
+        .sqlQuery("select * from dfs.`%s`", file)
         .unOrdered()
         .baselineRecords(testSetup.getExpectedRecords())
         .go();
   }
   @Test
   public void testDoubleNestedSchema_NoNullValues() throws Exception {
-    final String file = AvroTestUtil.generateDoubleNestedSchema_NoNullValues().getFilePath();
+    final String file = generateDoubleNestedSchema_NoNullValues().getFileName();
     final String sql = "select a_string, b_int, t.c_record.nested_1_string, t.c_record.nested_1_int, " +
             "t.c_record.nested_1_record.double_nested_1_string, " +
             "t.c_record.nested_1_record.double_nested_1_int " +
-            "from dfs_test.`" + file + "` t";
-    test(sql);
+            "from dfs.`%s` t";
+    test(sql, file);
 
-    final String sql2 = "select t.c_record.nested_1_string " +
-        "from dfs_test.`" + file + "` t limit 1";
+    final String sql2 = "select t.c_record.nested_1_string from dfs.`%s` t limit 1";
     TestBuilder testBuilder = testBuilder()
-        .sqlQuery(sql2)
+        .sqlQuery(sql2, file)
         .unOrdered()
         .baselineColumns("EXPR$0");
     for (int i = 0; i < 1; i++) {
-      testBuilder
-          .baselineValues("nested_1_string_" + i);
+      testBuilder.baselineValues("nested_1_string_" + i);
     }
     testBuilder.go();
   }
 
   @Test
   public void testDoubleNestedSchema_StarQuery() throws Exception {
-    simpleAvroTestHelper(AvroTestUtil.generateDoubleNestedSchema_NoNullValues(), "select * from dfs_test.`%s`");
+    simpleAvroTestHelper(generateDoubleNestedSchema_NoNullValues(), "select * from dfs.`%s`");
   }
 
   private static void simpleAvroTestHelper(AvroTestUtil.AvroTestRecordWriter testSetup, final String sql) throws Exception {
-    final String file = testSetup.getFilePath();
-    final String sqlWithTable = String.format(sql, file);
     testBuilder()
-        .sqlQuery(sqlWithTable)
+        .sqlQuery(sql, testSetup.getFileName())
         .unOrdered()
         .baselineRecords(testSetup.getExpectedRecords())
         .go();
@@ -283,12 +262,12 @@ public class AvroFormatTest extends BaseTestQuery {
 
   @Test
   public void testSimpleEnumSchema_NoNullValues() throws Exception {
-    final AvroTestUtil.AvroTestRecordWriter testSetup = AvroTestUtil.generateSimpleEnumSchema_NoNullValues();
-    final String file = testSetup.getFilePath();
-    final String sql = "select a_string, b_enum from dfs_test.`" + file + "`";
+    final AvroTestUtil.AvroTestRecordWriter testSetup = generateSimpleEnumSchema_NoNullValues();
+    final String file = testSetup.getFileName();
+    final String sql = "select a_string, b_enum from dfs.`%s`";
     List<String> projectList = Lists.newArrayList("`a_string`", "`b_enum`");
     testBuilder()
-        .sqlQuery(sql)
+        .sqlQuery(sql, file)
         .unOrdered()
         .baselineRecords(project(testSetup.getExpectedRecords(), projectList))
         .go();
@@ -296,21 +275,19 @@ public class AvroFormatTest extends BaseTestQuery {
 
   @Test
   public void testSimpleEnumSchema_StarQuery() throws Exception {
-    simpleAvroTestHelper(AvroTestUtil.generateSimpleEnumSchema_NoNullValues(), "select * from dfs_test.`%s`");
+    simpleAvroTestHelper(generateSimpleEnumSchema_NoNullValues(), "select * from dfs.`%s`");
   }
 
   @Test
   public void testSimpleUnionSchema_StarQuery() throws Exception {
-    simpleAvroTestHelper(AvroTestUtil.generateUnionSchema_WithNullValues(), "select * from dfs_test.`%s`");
+    simpleAvroTestHelper(generateUnionSchema_WithNullValues(), "select * from dfs.`%s`");
   }
 
   @Test
   public void testShouldFailSimpleUnionNonNullSchema_StarQuery() throws Exception {
-
-    final String file = AvroTestUtil.generateUnionSchema_WithNonNullValues().getFilePath();
-    final String sql = "select * from dfs_test.`" + file + "`";
+    final String file = generateUnionSchema_WithNonNullValues().getFileName();
     try {
-      test(sql);
+      test("select * from dfs.`%s`", file);
       Assert.fail("Test should fail as union is only supported for optional fields");
     } catch(UserRemoteException e) {
       String message = e.getMessage();
@@ -320,25 +297,21 @@ public class AvroFormatTest extends BaseTestQuery {
 
   @Test
   public void testNestedUnionSchema_withNullValues() throws Exception {
-
-    final String file = AvroTestUtil.generateUnionNestedSchema_withNullValues().getFilePath();
-    final String sql = "select t.c_record.nested_1_string,t.c_record.nested_1_int from dfs_test.`" + file + "` t";
-    test(sql);
+    final String file = generateUnionNestedSchema_withNullValues().getFileName();
+    final String sql = "select t.c_record.nested_1_string,t.c_record.nested_1_int from dfs.`%s` t";
+    test(sql, file);
   }
 
-  /**
-   *  See <a href="https://issues.apache.org/jira/browse/DRILL-4574"></a>
-   *
-   */
+    // DRILL-4574
   @Test
   public void testFlattenPrimitiveArray() throws Exception {
-    final String file = AvroTestUtil.generateSimpleArraySchema_NoNullValues().getFilePath();
+    final String file = generateSimpleArraySchema_NoNullValues().getFileName();
+    final String sql = "select a_string, flatten(c_string_array) as array_item from dfs.`%s` t";
 
-    final String sql = "select a_string, flatten(c_string_array) as array_item "
-        + "from dfs_test.`" + file + "` t";
-
-    TestBuilder testBuilder = testBuilder().sqlQuery(sql).unOrdered()
-        .baselineColumns("a_string", "array_item");
+    TestBuilder testBuilder = testBuilder()
+      .sqlQuery(sql, file)
+      .unOrdered()
+      .baselineColumns("a_string", "array_item");
 
     for (int i = 0; i < AvroTestUtil.RECORD_COUNT; i++) {
 
@@ -347,30 +320,25 @@ public class AvroFormatTest extends BaseTestQuery {
       }
     }
 
-
     testBuilder.go();
-
   }
 
   private TestBuilder nestedArrayQueryTestBuilder(String file) {
-
     final String sql = "select rec_nr, array_item['nested_1_int'] as array_item_nested_int from "
-        + "(select a_int as rec_nr, flatten(t.b_array) as array_item " + "from dfs_test.`" + file + "` t) a";
+        + "(select a_int as rec_nr, flatten(t.b_array) as array_item from dfs.`%s` t) a";
 
-    TestBuilder testBuilder = testBuilder().sqlQuery(sql).unOrdered().baselineColumns("rec_nr",
-        "array_item_nested_int");
+    TestBuilder testBuilder = testBuilder()
+      .sqlQuery(sql, file)
+      .unOrdered()
+      .baselineColumns("rec_nr", "array_item_nested_int");
 
     return testBuilder;
-
   }
 
-
-  /**
-   * See <a href="https://issues.apache.org/jira/browse/DRILL-4574"></a>
-   */
+  // DRILL-4574
   @Test
   public void testFlattenComplexArray() throws Exception {
-    final String file = AvroTestUtil.generateNestedArraySchema().getFilePath();
+    final String file = generateNestedArraySchema().getFileName();
 
     TestBuilder testBuilder = nestedArrayQueryTestBuilder(file);
     for (int i = 0; i < AvroTestUtil.RECORD_COUNT; i++) {
@@ -381,40 +349,36 @@ public class AvroFormatTest extends BaseTestQuery {
     testBuilder.go();
 
   }
-  /**
-   * See <a href="https://issues.apache.org/jira/browse/DRILL-4574"></a>
-   */
+
+  // DRILL-4574
   @Test
   public void testFlattenEmptyComplexArrayMustYieldNoResults() throws Exception {
-    final String file = AvroTestUtil.generateNestedArraySchema(AvroTestUtil.RECORD_COUNT, 0).getFilePath();
+    final String file = generateNestedArraySchema(AvroTestUtil.RECORD_COUNT, 0).getFileName();
     TestBuilder testBuilder = nestedArrayQueryTestBuilder(file);
     testBuilder.expectsEmptyResultSet();
   }
 
   @Test
   public void testNestedUnionArraySchema_withNullValues() throws Exception {
-
-    final String file = AvroTestUtil.generateUnionNestedArraySchema_withNullValues().getFilePath();
-    final String sql = "select t.c_array[0].nested_1_string,t.c_array[0].nested_1_int from dfs_test.`" + file + "` t";
-    test(sql);
+    final String file = generateUnionNestedArraySchema_withNullValues().getFileName();
+    final String sql = "select t.c_array[0].nested_1_string,t.c_array[0].nested_1_int from dfs.`%s` t";
+    test(sql, file);
   }
 
   @Test
   public void testMapSchema_withNullValues() throws Exception {
-
-    final String file = AvroTestUtil.generateMapSchema_withNullValues().getFilePath();
-    final String sql = "select c_map['key1'],c_map['key2'] from dfs_test.`" + file + "`";
-    test(sql);
+    final String file = generateMapSchema_withNullValues().getFileName();
+    final String sql = "select c_map['key1'],c_map['key2'] from dfs.`%s`";
+    test(sql, file);
   }
 
   @Test
   public void testMapSchemaComplex_withNullValues() throws Exception {
-
-    final String file = AvroTestUtil.generateMapSchemaComplex_withNullValues().getFilePath();
-    final String sql = "select d_map['key1'] nested_key1, d_map['key2'] nested_key2 from dfs_test.`" + file + "`";
+    final String file = generateMapSchemaComplex_withNullValues().getFileName();
+    final String sql = "select d_map['key1'] nested_key1, d_map['key2'] nested_key2 from dfs.`%s`";
 
     TestBuilder testBuilder = testBuilder()
-        .sqlQuery(sql)
+        .sqlQuery(sql, file)
         .unOrdered()
         .baselineColumns("nested_key1", "nested_key2");
 
@@ -432,26 +396,25 @@ public class AvroFormatTest extends BaseTestQuery {
 
   @Test
   public void testStringAndUtf8Data() throws Exception {
-    simpleAvroTestHelper(AvroTestUtil.generateStringAndUtf8Data(), "select * from dfs_test.`%s`");
+    simpleAvroTestHelper(generateStringAndUtf8Data(), "select * from dfs.`%s`");
   }
 
   @Test
   public void testLinkedList() throws Exception {
-    final String file = AvroTestUtil.generateLinkedList();
-    final String sql = "select * from dfs_test.`" + file + "`";
-    test(sql);
+    final String file = generateLinkedList();
+    final String sql = "select * from dfs.`%s`";
+    test(sql, file);
   }
 
   @Test
   public void testCountStar() throws Exception {
-    final String file = AvroTestUtil.generateStringAndUtf8Data().getFilePath();
-    final String sql = "select count(*) as row_count from dfs_test.`" + file + "`";
+    final String file = generateStringAndUtf8Data().getFileName();
+    final String sql = "select count(*) as row_count from dfs.`%s`";
     testBuilder()
-        .sqlQuery(sql)
+        .sqlQuery(sql, file)
         .ordered()
         .baselineColumns("row_count")
         .baselineValues((long)AvroTestUtil.RECORD_COUNT)
         .go();
   }
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroTestUtil.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroTestUtil.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroTestUtil.java
index 86d29ae..1cb9284 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroTestUtil.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroTestUtil.java
@@ -37,10 +37,10 @@ import org.apache.avro.generic.GenericData;
 import org.apache.avro.generic.GenericDatumWriter;
 import org.apache.avro.generic.GenericRecord;
 
-import com.google.common.base.Charsets;
 import org.apache.drill.exec.util.JsonStringArrayList;
 import org.apache.drill.exec.util.JsonStringHashMap;
 import org.apache.drill.exec.util.Text;
+import org.apache.drill.test.BaseTestQuery;
 
 /**
  * Utilities for generating Avro test data.
@@ -62,6 +62,7 @@ public class AvroTestUtil {
     private Schema schema;
     private final DataFileWriter<GenericData.Record> writer;
     private final String filePath;
+    private final String fileName;
 
     private AvroTestRecordWriter(Schema schema, File file) {
       writer = new DataFileWriter<GenericData.Record>(new GenericDatumWriter<GenericData.Record>(schema));
@@ -74,6 +75,7 @@ public class AvroTestUtil {
       currentExpectedRecord = new TreeMap<>();
       expectedRecords = new ArrayList<>();
       filePath = file.getAbsolutePath();
+      fileName = file.getName();
     }
 
     public void startRecord() {
@@ -134,6 +136,10 @@ public class AvroTestUtil {
       return filePath;
     }
 
+    public String getFileName() {
+      return fileName;
+    }
+
     public List<Map<String, Object>>getExpectedRecords() {
       return expectedRecords;
     }
@@ -145,7 +151,6 @@ public class AvroTestUtil {
   }
 
   public static AvroTestRecordWriter generateSimplePrimitiveSchema_NoNullValues(int numRecords) throws Exception {
-
     final Schema schema = SchemaBuilder.record("AvroRecordReaderTest")
             .namespace("org.apache.drill.exec.store.avro")
             .fields()
@@ -159,10 +164,9 @@ public class AvroTestUtil {
             .name("h_boolean").type().booleanType().noDefault()
             .endRecord();
 
-    final File file = File.createTempFile("avro-primitive-test", ".avro");
-    file.deleteOnExit();
-
+    final File file = File.createTempFile("avro-primitive-test", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
     final AvroTestRecordWriter record = new AvroTestRecordWriter(schema, file);
+
     try {
       ByteBuffer bb = ByteBuffer.allocate(2);
       bb.put(0, (byte) 'a');
@@ -189,7 +193,6 @@ public class AvroTestUtil {
   }
 
   public static AvroTestRecordWriter generateUnionSchema_WithNullValues() throws Exception {
-
     final Schema schema = SchemaBuilder.record("AvroRecordReaderTest")
             .namespace("org.apache.drill.exec.store.avro")
             .fields()
@@ -204,12 +207,10 @@ public class AvroTestUtil {
             .name("i_union").type().optional().doubleType()
             .endRecord();
 
-    final File file = File.createTempFile("avro-primitive-test", ".avro");
-    file.deleteOnExit();
-
+    final File file = File.createTempFile("avro-primitive-test", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
     final AvroTestRecordWriter record = new AvroTestRecordWriter(schema, file);
-    try {
 
+    try {
       ByteBuffer bb = ByteBuffer.allocate(1);
       bb.put(0, (byte) 1);
 
@@ -234,7 +235,6 @@ public class AvroTestUtil {
   }
 
   public static AvroTestRecordWriter generateUnionSchema_WithNonNullValues() throws Exception {
-
     final Schema schema = SchemaBuilder.record("AvroRecordReaderTest")
             .namespace("org.apache.drill.exec.store.avro")
             .fields()
@@ -249,9 +249,7 @@ public class AvroTestUtil {
             .name("i_union").type().unionOf().doubleType().and().longType().endUnion().noDefault()
             .endRecord();
 
-    final File file = File.createTempFile("avro-primitive-test", ".avro");
-    file.deleteOnExit();
-
+    final File file = File.createTempFile("avro-primitive-test", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
     final AvroTestRecordWriter record = new AvroTestRecordWriter(schema, file);
     try {
 
@@ -279,7 +277,6 @@ public class AvroTestUtil {
   }
 
   public static AvroTestRecordWriter generateSimpleEnumSchema_NoNullValues() throws Exception {
-
     final String[] symbols = { "E_SYM_A", "E_SYM_B", "E_SYM_C", "E_SYM_D" };
 
     final Schema schema = SchemaBuilder.record("AvroRecordReaderTest")
@@ -289,9 +286,7 @@ public class AvroTestUtil {
             .name("b_enum").type().enumeration("my_enum").symbols(symbols).noDefault()
             .endRecord();
 
-    final File file = File.createTempFile("avro-primitive-test", ".avro");
-    file.deleteOnExit();
-
+    final File file = File.createTempFile("avro-primitive-test", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
     final Schema enumSchema = schema.getField("b_enum").schema();
 
     final AvroTestRecordWriter record = new AvroTestRecordWriter(schema, file);
@@ -313,10 +308,7 @@ public class AvroTestUtil {
   }
 
   public static AvroTestRecordWriter generateSimpleArraySchema_NoNullValues() throws Exception {
-
-    final File file = File.createTempFile("avro-array-test", ".avro");
-    file.deleteOnExit();
-
+    final File file = File.createTempFile("avro-array-test", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
     final Schema schema = SchemaBuilder.record("AvroRecordReaderTest")
             .namespace("org.apache.drill.exec.store.avro")
             .fields()
@@ -364,10 +356,7 @@ public class AvroTestUtil {
   }
 
   public static AvroTestRecordWriter generateSimpleNestedSchema_NoNullValues() throws Exception {
-
-    final File file = File.createTempFile("avro-nested-test", ".avro");
-    file.deleteOnExit();
-
+    final File file = File.createTempFile("avro-nested-test", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
     final Schema schema = SchemaBuilder.record("AvroRecordReaderTest")
             .namespace("org.apache.drill.exec.store.avro")
             .fields()
@@ -406,10 +395,7 @@ public class AvroTestUtil {
   }
 
   public static AvroTestRecordWriter generateUnionNestedArraySchema_withNullValues() throws Exception {
-
-    final File file = File.createTempFile("avro-nested-test", ".avro");
-    file.deleteOnExit();
-
+    final File file = File.createTempFile("avro-nested-test", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
     final Schema schema = SchemaBuilder.record("AvroRecordReaderTest")
             .namespace("org.apache.drill.exec.store.avro")
             .fields()
@@ -455,10 +441,7 @@ public class AvroTestUtil {
   }
 
   public static AvroTestRecordWriter generateNestedArraySchema(int numRecords, int numArrayItems) throws IOException {
-
-    final File file = File.createTempFile("avro-nested-test", ".avro");
-    file.deleteOnExit();
-
+    final File file = File.createTempFile("avro-nested-test", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
     final Schema schema = SchemaBuilder.record("AvroRecordReaderTest").namespace("org.apache.drill.exec.store.avro")
         .fields().name("a_int").type().intType().noDefault().name("b_array").type().array().items()
         .record("my_record_1").namespace("foo.blah.org").fields().name("nested_1_int").type().optional().intType()
@@ -490,10 +473,7 @@ public class AvroTestUtil {
   }
 
   public static AvroTestRecordWriter generateMapSchema_withNullValues() throws Exception {
-
-    final File file = File.createTempFile("avro-nested-test", ".avro");
-    file.deleteOnExit();
-
+    final File file = File.createTempFile("avro-nested-test", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
     final Schema schema = SchemaBuilder.record("AvroRecordReaderTest")
             .namespace("org.apache.drill.exec.store.avro")
             .fields()
@@ -525,10 +505,7 @@ public class AvroTestUtil {
   }
 
   public static AvroTestRecordWriter generateMapSchemaComplex_withNullValues() throws Exception {
-
-    final File file = File.createTempFile("avro-nested-test", ".avro");
-    file.deleteOnExit();
-
+    final File file = File.createTempFile("avro-nested-test", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
     final Schema schema = SchemaBuilder.record("AvroRecordReaderTest")
             .namespace("org.apache.drill.exec.store.avro")
             .fields()
@@ -574,10 +551,7 @@ public class AvroTestUtil {
   }
 
   public static AvroTestRecordWriter generateUnionNestedSchema_withNullValues() throws Exception {
-
-    final File file = File.createTempFile("avro-nested-test", ".avro");
-    file.deleteOnExit();
-
+    final File file = File.createTempFile("avro-nested-test", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
     final Schema schema = SchemaBuilder.record("AvroRecordReaderTest")
             .namespace("org.apache.drill.exec.store.avro")
             .fields()
@@ -616,10 +590,7 @@ public class AvroTestUtil {
   }
 
   public static AvroTestRecordWriter generateDoubleNestedSchema_NoNullValues() throws Exception {
-
-    final File file = File.createTempFile("avro-double-nested-test", ".avro");
-    file.deleteOnExit();
-
+    final File file = File.createTempFile("avro-double-nested-test", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
     final Schema schema = SchemaBuilder.record("AvroRecordReaderTest")
             .namespace("org.apache.drill.exec.store.avro")
             .fields()
@@ -672,10 +643,7 @@ public class AvroTestUtil {
   }
 
   public static String generateLinkedList() throws Exception {
-
-    final File file = File.createTempFile("avro-linkedlist", ".avro");
-    file.deleteOnExit();
-
+    final File file = File.createTempFile("avro-linkedlist", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
     final Schema schema = SchemaBuilder.record("LongList")
             .namespace("org.apache.drill.exec.store.avro")
             .aliases("LinkedLongs")
@@ -703,7 +671,7 @@ public class AvroTestUtil {
       writer.close();
     }
 
-    return file.getAbsolutePath();
+    return file.getName();
   }
 
   public static AvroTestRecordWriter generateStringAndUtf8Data() throws Exception {
@@ -715,9 +683,7 @@ public class AvroTestUtil {
             .name("b_utf8").type().stringType().noDefault()
             .endRecord();
 
-    final File file = File.createTempFile("avro-primitive-test", ".avro");
-    file.deleteOnExit();
-
+    final File file = File.createTempFile("avro-primitive-test", ".avro", BaseTestQuery.dirTestWatcher.getRootDir());
     final AvroTestRecordWriter record = new AvroTestRecordWriter(schema, file);
     try {
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/store/bson/TestBsonRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/bson/TestBsonRecordReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/bson/TestBsonRecordReader.java
index 1919184..1b2801d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/bson/TestBsonRecordReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/bson/TestBsonRecordReader.java
@@ -23,7 +23,7 @@ import static org.junit.Assert.assertTrue;
 import java.io.IOException;
 import java.util.Arrays;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.exec.memory.BufferAllocator;
 import org.apache.drill.exec.store.TestOutputMutator;
 import org.apache.drill.exec.vector.complex.impl.SingleMapReaderImpl;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFileSelection.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFileSelection.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFileSelection.java
index d23cd1f..f74bc55 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFileSelection.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestFileSelection.java
@@ -20,11 +20,11 @@ package org.apache.drill.exec.store.dfs;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
 
+import java.nio.file.Paths;
 import java.util.List;
 
 import com.google.common.collect.ImmutableList;
-import org.apache.drill.BaseTestQuery;
-import org.apache.drill.common.util.TestTools;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.hadoop.fs.FileStatus;
 import org.junit.Test;
 
@@ -46,15 +46,14 @@ public class TestFileSelection extends BaseTestQuery {
     }
   }
 
-
   @Test(expected = Exception.class)
   public void testEmptyFolderThrowsTableNotFound() throws Exception {
-    final String table = String.format("%s/empty", TestTools.getTestResourcesPath());
-    final String query = String.format("select * from dfs.`%s`", table);
+    final String emptyDirPath = dirTestWatcher.makeRootSubDir(Paths.get("empty")).getAbsolutePath();
+    final String query = String.format("select * from dfs.`%s`", emptyDirPath);
     try {
       testNoResult(query);
     } catch (Exception ex) {
-      final String pattern = String.format("%s' not found", table).toLowerCase();
+      final String pattern = String.format("%s' not found", emptyDirPath).toLowerCase();
       final boolean isTableNotFound = ex.getMessage().toLowerCase().contains(pattern);
       assertTrue(isTableNotFound);
       throw ex;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestGlob.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestGlob.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestGlob.java
index 59cb22b..8dae43f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestGlob.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/dfs/TestGlob.java
@@ -6,9 +6,9 @@
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
+ * <p>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p>
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
@@ -17,54 +17,62 @@
  */
 package org.apache.drill.exec.store.dfs;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.UnlikelyTest;
-import org.apache.drill.common.util.TestTools;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.nio.file.Paths;
+
 @Category(UnlikelyTest.class)
 public class TestGlob extends BaseTestQuery {
 
-    String MULTILEVEL = TestTools.getWorkingPath() + "/../java-exec/src/test/resources/multilevel";
+  @BeforeClass
+  public static void setupTestFiles() {
+    dirTestWatcher.copyResourceToRoot(Paths.get("multilevel"));
+  }
 
-    @Test
-    public void testGlobSet() throws Exception {
-        testBuilder()
-            .sqlQuery(String.format("select count(*) from dfs_test.`%s/parquet/{1994,1995}`", MULTILEVEL))
-            .unOrdered()
-            .baselineColumns("EXPR$0")
-            .baselineValues(80L)
-            .build().run();
-    }
+  @Test
+  public void testGlobSet() throws Exception {
+    testBuilder()
+      .sqlQuery("select count(*) from dfs.`multilevel/parquet/{1994,1995}`")
+      .unOrdered().baselineColumns("EXPR$0")
+      .baselineValues(80L)
+      .build()
+      .run();
+  }
 
-    @Test
-    public void testGlobWildcard() throws Exception {
-        testBuilder()
-            .sqlQuery(String.format("select count(*) from dfs_test.`%s/parquet/1994/*`", MULTILEVEL))
-            .unOrdered()
-            .baselineColumns("EXPR$0")
-            .baselineValues(40L)
-            .build().run();
-    }
+  @Test
+  public void testGlobWildcard() throws Exception {
+    testBuilder()
+      .sqlQuery("select count(*) from dfs.`multilevel/parquet/1994/*`")
+      .unOrdered()
+      .baselineColumns("EXPR$0")
+      .baselineValues(40L)
+      .build()
+      .run();
+  }
 
-    @Test
-    public void testGlobSingleCharacter() throws Exception {
-        testBuilder()
-            .sqlQuery(String.format("select count(*) from dfs_test.`%s/parquet/199?/*`", MULTILEVEL))
-            .unOrdered()
-            .baselineColumns("EXPR$0")
-            .baselineValues(120L)
-            .build().run();
-    }
+  @Test
+  public void testGlobSingleCharacter() throws Exception {
+    testBuilder()
+      .sqlQuery("select count(*) from dfs.`multilevel/parquet/199?/*`")
+      .unOrdered()
+      .baselineColumns("EXPR$0")
+      .baselineValues(120L)
+      .build()
+      .run();
+  }
 
-    @Test
-    public void testGlobSingleCharacterRange() throws Exception {
-        testBuilder()
-            .sqlQuery(String.format("select count(*) from dfs_test.`%s/parquet/199[4-5]/*`", MULTILEVEL))
-            .unOrdered()
-            .baselineColumns("EXPR$0")
-            .baselineValues(80L)
-            .build().run();
-    }
+  @Test
+  public void testGlobSingleCharacterRange() throws Exception {
+    testBuilder()
+      .sqlQuery("select count(*) from dfs.`multilevel/parquet/199[4-5]/*`")
+      .unOrdered()
+      .baselineColumns("EXPR$0")
+      .baselineValues(80L)
+      .build()
+      .run();
+  }
 }


Mime
View raw message