drill-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From prog...@apache.org
Subject [17/22] drill git commit: DRILL-5783, DRILL-5841, DRILL-5894: Rationalize test temp directories
Date Wed, 15 Nov 2017 01:47:03 GMT
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestPartitionFilter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestPartitionFilter.java b/exec/java-exec/src/test/java/org/apache/drill/TestPartitionFilter.java
index 0ff7a12..6f9f088 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestPartitionFilter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestPartitionFilter.java
@@ -17,25 +17,19 @@
  */
 package org.apache.drill;
 
-
 import static org.junit.Assert.assertEquals;
 
 import org.apache.drill.categories.PlannerTest;
 import org.apache.drill.categories.SqlTest;
 import org.apache.drill.categories.UnlikelyTest;
-import org.apache.drill.common.util.FileUtils;
-import org.apache.drill.common.util.TestTools;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.nio.file.Paths;
+
 @Category({SqlTest.class, PlannerTest.class})
 public class TestPartitionFilter extends PlanTestBase {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestPartitionFilter.class);
-
-  static final String WORKING_PATH = TestTools.getWorkingPath();
-  static final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources";
-
   private static void testExcludeFilter(String query, int expectedNumFiles,
       String excludedFilterPattern, int expectedRowCount) throws Exception {
     int actualRowCount = testSql(query);
@@ -54,291 +48,289 @@ public class TestPartitionFilter extends PlanTestBase {
 
   @BeforeClass
   public static void createParquetTable() throws Exception {
+    dirTestWatcher.copyResourceToRoot(Paths.get("multilevel"));
+
     test("alter session set `planner.disable_exchanges` = true");
-    test(String.format("create table dfs_test.tmp.parquet partition by (yr, qrtr) as select o_orderkey, o_custkey, " +
+    test("create table dfs.tmp.parquet partition by (yr, qrtr) as select o_orderkey, o_custkey, " +
         "o_orderstatus, o_totalprice, o_orderdate, o_orderpriority, o_clerk, o_shippriority, o_comment, cast(dir0 as int) yr, dir1 qrtr " +
-        "from dfs_test.`%s/multilevel/parquet`", TEST_RES_PATH));
+        "from dfs.`multilevel/parquet`");
     test("alter session set `planner.disable_exchanges` = false");
   }
 
   @Test  //Parquet: basic test with dir0 and dir1 filters
   public void testPartitionFilter1_Parquet() throws Exception {
-    String query = String.format("select dir0, dir1, o_custkey, o_orderdate from dfs_test.`%s/multilevel/parquet` where dir0=1994 and dir1='Q1'", TEST_RES_PATH);
-    testExcludeFilter(query, 1, "Filter", 10);
+    String query = "select dir0, dir1, o_custkey, o_orderdate from dfs.`multilevel/parquet` where dir0=1994 and dir1='Q1'";
+    testExcludeFilter(query, 1, "Filter\\(", 10);
   }
 
   @Test  //Parquet: basic test with dir0 and dir1 filters
   public void testPartitionFilter1_Parquet_from_CTAS() throws Exception {
-    String query = String.format("select yr, qrtr, o_custkey, o_orderdate from dfs_test.tmp.parquet where yr=1994 and qrtr='Q1'", TEST_RES_PATH);
-    testExcludeFilter(query, 1, "Filter", 10);
+    String query = "select yr, qrtr, o_custkey, o_orderdate from dfs.tmp.parquet where yr=1994 and qrtr='Q1'";
+    testExcludeFilter(query, 1, "Filter\\(", 10);
   }
 
   @Test  //Json: basic test with dir0 and dir1 filters
   public void testPartitionFilter1_Json() throws Exception {
-    String query = String.format("select dir0, dir1, o_custkey, o_orderdate from dfs_test.`%s/multilevel/json` where dir0=1994 and dir1='Q1'", TEST_RES_PATH);
-    testExcludeFilter(query, 1, "Filter", 10);
+    String query = "select dir0, dir1, o_custkey, o_orderdate from dfs.`multilevel/json` where dir0=1994 and dir1='Q1'";
+    testExcludeFilter(query, 1, "Filter\\(", 10);
   }
 
   @Test  //Json: basic test with dir0 and dir1 filters
   public void testPartitionFilter1_JsonFileMixDir() throws Exception {
-    String query = String.format("select dir0, dir1, o_custkey, o_orderdate from dfs_test.`%s/multilevel/jsonFileMixDir` where dir0=1995 and dir1='Q1'", TEST_RES_PATH);
-    testExcludeFilter(query, 1, "Filter", 10);
+    String query = "select dir0, dir1, o_custkey, o_orderdate from dfs.`multilevel/jsonFileMixDir` where dir0=1995 and dir1='Q1'";
+    testExcludeFilter(query, 1, "Filter\\(", 10);
   }
 
   @Test  //Json: basic test with dir0 = and dir1 is null filters
   public void testPartitionFilterIsNull_JsonFileMixDir() throws Exception {
-    String query = String.format("select dir0, dir1, o_custkey, o_orderdate from dfs_test.`%s/multilevel/jsonFileMixDir` where dir0=1995 and dir1 is null", TEST_RES_PATH);
-    testExcludeFilter(query, 1, "Filter", 5);
+    String query = "select dir0, dir1, o_custkey, o_orderdate from dfs.`multilevel/jsonFileMixDir` where dir0=1995 and dir1 is null";
+    testExcludeFilter(query, 1, "Filter\\(", 5);
   }
 
   @Test  //Json: basic test with dir0 = and dir1 is not null filters
   public void testPartitionFilterIsNotNull_JsonFileMixDir() throws Exception {
-    String query = String.format("select dir0, dir1, o_custkey, o_orderdate from dfs_test.`%s/multilevel/jsonFileMixDir` where dir0=1995 and dir1 is not null", TEST_RES_PATH);
-    testExcludeFilter(query, 4, "Filter", 40);
+    String query = "select dir0, dir1, o_custkey, o_orderdate from dfs.`multilevel/jsonFileMixDir` where dir0=1995 and dir1 is not null";
+    testExcludeFilter(query, 4, "Filter\\(", 40);
   }
 
   @Test  //CSV: basic test with dir0 and dir1 filters in
   public void testPartitionFilter1_Csv() throws Exception {
-    String query = String.format("select * from dfs_test.`%s/multilevel/csv` where dir0=1994 and dir1='Q1'", TEST_RES_PATH);
-    testExcludeFilter(query, 1, "Filter", 10);
+    String query = "select * from dfs.`multilevel/csv` where dir0=1994 and dir1='Q1'";
+    testExcludeFilter(query, 1, "Filter\\(", 10);
   }
 
   @Test //Parquet: partition filters are combined with regular columns in an AND
   public void testPartitionFilter2_Parquet() throws Exception {
-    String query = String.format("select dir0, dir1, o_custkey, o_orderdate from dfs_test.`%s/multilevel/parquet` where o_custkey < 1000 and dir0=1994 and dir1='Q1'", TEST_RES_PATH);
-    testIncludeFilter(query, 1, "Filter", 5);
+    String query = "select dir0, dir1, o_custkey, o_orderdate from dfs.`multilevel/parquet` where o_custkey < 1000 and dir0=1994 and dir1='Q1'";
+    testIncludeFilter(query, 1, "Filter\\(", 5);
   }
 
   @Test //Parquet: partition filters are combined with regular columns in an AND
   public void testPartitionFilter2_Parquet_from_CTAS() throws Exception {
-    String query = String.format("select yr, qrtr, o_custkey, o_orderdate from dfs_test.tmp.parquet where o_custkey < 1000 and yr=1994 and qrtr='Q1'", TEST_RES_PATH);
-    testIncludeFilter(query, 1, "Filter", 5);
+    String query = "select yr, qrtr, o_custkey, o_orderdate from dfs.tmp.parquet where o_custkey < 1000 and yr=1994 and qrtr='Q1'";
+    testIncludeFilter(query, 1, "Filter\\(", 5);
   }
 
   @Test //Json: partition filters are combined with regular columns in an AND
   public void testPartitionFilter2_Json() throws Exception {
-    String query = String.format("select dir0, dir1, o_custkey, o_orderdate from dfs_test.`%s/multilevel/json` where o_custkey < 1000 and dir0=1994 and dir1='Q1'", TEST_RES_PATH);
-    testIncludeFilter(query, 1, "Filter", 5);
+    String query = "select dir0, dir1, o_custkey, o_orderdate from dfs.`multilevel/json` where o_custkey < 1000 and dir0=1994 and dir1='Q1'";
+    testIncludeFilter(query, 1, "Filter\\(", 5);
   }
 
   @Test //CSV: partition filters are combined with regular columns in an AND
   public void testPartitionFilter2_Csv() throws Exception {
-    String query = String.format("select * from dfs_test.`%s/multilevel/csv` where columns[1] < 1000 and dir0=1994 and dir1='Q1'", TEST_RES_PATH);
-    testIncludeFilter(query, 1, "Filter", 5);
+    String query = "select * from dfs.`multilevel/csv` where columns[1] < 1000 and dir0=1994 and dir1='Q1'";
+    testIncludeFilter(query, 1, "Filter\\(", 5);
   }
 
   @Test //Parquet: partition filters are ANDed and belong to a top-level OR
   public void testPartitionFilter3_Parquet() throws Exception {
-    String query = String.format("select * from dfs_test.`%s/multilevel/parquet` where (dir0=1994 and dir1='Q1' and o_custkey < 500) or (dir0=1995 and dir1='Q2' and o_custkey > 500)", TEST_RES_PATH);
-    testIncludeFilter(query, 2, "Filter", 8);
+    String query = "select * from dfs.`multilevel/parquet` where (dir0=1994 and dir1='Q1' and o_custkey < 500) or (dir0=1995 and dir1='Q2' and o_custkey > 500)";
+    testIncludeFilter(query, 2, "Filter\\(", 8);
   }
   @Test //Parquet: partition filters are ANDed and belong to a top-level OR
   public void testPartitionFilter3_Parquet_from_CTAS() throws Exception {
-    String query = String.format("select * from dfs_test.tmp.parquet where (yr=1994 and qrtr='Q1' and o_custkey < 500) or (yr=1995 and qrtr='Q2' and o_custkey > 500)", TEST_RES_PATH);
-    testIncludeFilter(query, 2, "Filter", 8);
+    String query = "select * from dfs.tmp.parquet where (yr=1994 and qrtr='Q1' and o_custkey < 500) or (yr=1995 and qrtr='Q2' and o_custkey > 500)";
+    testIncludeFilter(query, 2, "Filter\\(", 8);
   }
 
   @Test //Json: partition filters are ANDed and belong to a top-level OR
   public void testPartitionFilter3_Json() throws Exception {
-    String query = String.format("select * from dfs_test.`%s/multilevel/json` where (dir0=1994 and dir1='Q1' and o_custkey < 500) or (dir0=1995 and dir1='Q2' and o_custkey > 500)", TEST_RES_PATH);
-    testIncludeFilter(query, 2, "Filter", 8);
+    String query = "select * from dfs.`multilevel/json` where (dir0=1994 and dir1='Q1' and o_custkey < 500) or (dir0=1995 and dir1='Q2' and o_custkey > 500)";
+    testIncludeFilter(query, 2, "Filter\\(", 8);
   }
 
   @Test //CSV: partition filters are ANDed and belong to a top-level OR
   public void testPartitionFilter3_Csv() throws Exception {
-    String query = String.format("select * from dfs_test.`%s/multilevel/csv` where (dir0=1994 and dir1='Q1' and columns[1] < 500) or (dir0=1995 and dir1='Q2' and columns[1] > 500)", TEST_RES_PATH);
-    testIncludeFilter(query, 2, "Filter", 8);
+    String query = "select * from dfs.`multilevel/csv` where (dir0=1994 and dir1='Q1' and columns[1] < 500) or (dir0=1995 and dir1='Q2' and columns[1] > 500)";
+    testIncludeFilter(query, 2, "Filter\\(", 8);
   }
 
   @Test //Parquet: filters contain join conditions and partition filters
   public void testPartitionFilter4_Parquet() throws Exception {
-    String query1 = String.format("select t1.dir0, t1.dir1, t1.o_custkey, t1.o_orderdate, cast(t2.c_name as varchar(10)) from dfs_test.`%s/multilevel/parquet` t1, cp.`tpch/customer.parquet` t2 where t1.o_custkey = t2.c_custkey and t1.dir0=1994 and t1.dir1='Q1'", TEST_RES_PATH);
+    String query1 = "select t1.dir0, t1.dir1, t1.o_custkey, t1.o_orderdate, cast(t2.c_name as varchar(10)) from dfs.`multilevel/parquet` t1, cp.`tpch/customer.parquet` t2 where" +
+      " t1.o_custkey = t2.c_custkey and t1.dir0=1994 and t1.dir1='Q1'";
     test(query1);
   }
 
   @Test //Parquet: filters contain join conditions and partition filters
   public void testPartitionFilter4_Parquet_from_CTAS() throws Exception {
-    String query1 = String.format("select t1.dir0, t1.dir1, t1.o_custkey, t1.o_orderdate, cast(t2.c_name as varchar(10)) from dfs_test.tmp.parquet t1, cp.`tpch/customer.parquet` t2 where t1.o_custkey = t2.c_custkey and t1.yr=1994 and t1.qrtr='Q1'", TEST_RES_PATH);
+    String query1 = "select t1.dir0, t1.dir1, t1.o_custkey, t1.o_orderdate, cast(t2.c_name as varchar(10)) from dfs.tmp.parquet t1, cp.`tpch/customer.parquet` t2 where " +
+      "t1.o_custkey = t2.c_custkey and t1.yr=1994 and t1.qrtr='Q1'";
     test(query1);
   }
 
   @Test //Json: filters contain join conditions and partition filters
   public void testPartitionFilter4_Json() throws Exception {
-    String query1 = String.format("select t1.dir0, t1.dir1, t1.o_custkey, t1.o_orderdate, cast(t2.c_name as varchar(10)) from dfs_test.`%s/multilevel/json` t1, cp.`tpch/customer.parquet` t2 where cast(t1.o_custkey as bigint) = cast(t2.c_custkey as bigint) and t1.dir0=1994 and t1.dir1='Q1'", TEST_RES_PATH);
+    String query1 = "select t1.dir0, t1.dir1, t1.o_custkey, t1.o_orderdate, cast(t2.c_name as varchar(10)) from dfs.`multilevel/json` t1, cp.`tpch/customer.parquet` t2 where " +
+    "cast(t1.o_custkey as bigint) = cast(t2.c_custkey as bigint) and t1.dir0=1994 and t1.dir1='Q1'";
     test(query1);
   }
 
   @Test //CSV: filters contain join conditions and partition filters
   public void testPartitionFilter4_Csv() throws Exception {
-    String query1 = String.format("select t1.dir0, t1.dir1, t1.columns[1] as o_custkey, t1.columns[4] as o_orderdate, cast(t2.c_name as varchar(10)) from dfs_test.`%s/multilevel/csv` t1, cp.`tpch/customer.parquet` t2 where cast(t1.columns[1] as bigint) = cast(t2.c_custkey as bigint) and t1.dir0=1994 and t1.dir1='Q1'", TEST_RES_PATH);
+    String query1 = "select t1.dir0, t1.dir1, t1.columns[1] as o_custkey, t1.columns[4] as o_orderdate, cast(t2.c_name as varchar(10)) from dfs.`multilevel/csv` t1, cp" +
+    ".`tpch/customer.parquet` t2 where cast(t1.columns[1] as bigint) = cast(t2.c_custkey as bigint) and t1.dir0=1994 and t1.dir1='Q1'";
     test(query1);
   }
 
   @Test // Parquet: IN filter
   public void testPartitionFilter5_Parquet() throws Exception {
-    String query = String.format("select dir0, dir1, o_custkey, o_orderdate from dfs_test.`%s/multilevel/parquet` where dir0 in (1995, 1996)", TEST_RES_PATH);
-    testExcludeFilter(query, 8, "Filter", 80);
+    String query = "select dir0, dir1, o_custkey, o_orderdate from dfs.`multilevel/parquet` where dir0 in (1995, 1996)";
+    testExcludeFilter(query, 8, "Filter\\(", 80);
   }
 
   @Test // Parquet: IN filter
   public void testPartitionFilter5_Parquet_from_CTAS() throws Exception {
-    String query = String.format("select yr, qrtr, o_custkey, o_orderdate from dfs_test.tmp.parquet where yr in (1995, 1996)", TEST_RES_PATH);
-    testExcludeFilter(query, 8, "Filter", 80);
+    String query = "select yr, qrtr, o_custkey, o_orderdate from dfs.tmp.parquet where yr in (1995, 1996)";
+    testExcludeFilter(query, 8, "Filter\\(", 80);
   }
 
   @Test // Json: IN filter
   public void testPartitionFilter5_Json() throws Exception {
-    String query = String.format("select dir0, dir1, o_custkey, o_orderdate from dfs_test.`%s/multilevel/json` where dir0 in (1995, 1996)", TEST_RES_PATH);
-    testExcludeFilter(query, 8, "Filter", 80);
+    String query = "select dir0, dir1, o_custkey, o_orderdate from dfs.`multilevel/json` where dir0 in (1995, 1996)";
+    testExcludeFilter(query, 8, "Filter\\(", 80);
   }
 
   @Test // CSV: IN filter
   public void testPartitionFilter5_Csv() throws Exception {
-    String query = String.format("select * from dfs_test.`%s/multilevel/csv` where dir0 in (1995, 1996)", TEST_RES_PATH);
-    testExcludeFilter(query, 8, "Filter", 80);
+    String query = "select * from dfs.`multilevel/csv` where dir0 in (1995, 1996)";
+    testExcludeFilter(query, 8, "Filter\\(", 80);
   }
 
   @Test // Parquet: one side of OR has partition filter only, other side has both partition filter and non-partition filter
   public void testPartitionFilter6_Parquet() throws Exception {
-    String query = String.format("select * from dfs_test.`%s/multilevel/parquet` where (dir0=1995 and o_totalprice < 40000) or dir0=1996", TEST_RES_PATH);
-    testIncludeFilter(query, 8, "Filter", 46);
+    String query = "select * from dfs.`multilevel/parquet` where (dir0=1995 and o_totalprice < 40000) or dir0=1996";
+    testIncludeFilter(query, 8, "Filter\\(", 46);
   }
 
   @Test // Parquet: one side of OR has partition filter only, other side has both partition filter and non-partition filter
   public void testPartitionFilter6_Parquet_from_CTAS() throws Exception {
-    String query = String.format("select * from dfs_test.tmp.parquet where (yr=1995 and o_totalprice < 40000) or yr=1996", TEST_RES_PATH);
+    String query = "select * from dfs.tmp.parquet where (yr=1995 and o_totalprice < 40000) or yr=1996";
     // Parquet RG filter pushdown further reduces to 6 files.
-    testIncludeFilter(query, 6, "Filter", 46);
+    testIncludeFilter(query, 6, "Filter\\(", 46);
   }
 
   @Test // Parquet: trivial case with 1 partition filter
   public void testPartitionFilter7_Parquet() throws Exception {
-    String query = String.format("select * from dfs_test.`%s/multilevel/parquet` where dir0=1995", TEST_RES_PATH);
-    testExcludeFilter(query, 4, "Filter", 40);
+    String query = "select * from dfs.`multilevel/parquet` where dir0=1995";
+    testExcludeFilter(query, 4, "Filter\\(", 40);
   }
 
   @Test // Parquet: trivial case with 1 partition filter
   public void testPartitionFilter7_Parquet_from_CTAS() throws Exception {
-    String query = String.format("select * from dfs_test.tmp.parquet where yr=1995", TEST_RES_PATH);
-    testExcludeFilter(query, 4, "Filter", 40);
+    String query = "select * from dfs.tmp.parquet where yr=1995";
+    testExcludeFilter(query, 4, "Filter\\(", 40);
   }
 
   @Test // Parquet: partition filter on subdirectory only
   public void testPartitionFilter8_Parquet() throws Exception {
-    String query = String.format("select * from dfs_test.`%s/multilevel/parquet` where dir1 in ('Q1','Q4')", TEST_RES_PATH);
-    testExcludeFilter(query, 6, "Filter", 60);
+    String query = "select * from dfs.`multilevel/parquet` where dir1 in ('Q1','Q4')";
+    testExcludeFilter(query, 6, "Filter\\(", 60);
   }
 
   @Test
   public void testPartitionFilter8_Parquet_from_CTAS() throws Exception {
-    String query = String.format("select * from dfs_test.tmp.parquet where qrtr in ('Q1','Q4')", TEST_RES_PATH);
-    testExcludeFilter(query, 6, "Filter", 60);
+    String query = "select * from dfs.tmp.parquet where qrtr in ('Q1','Q4')";
+    testExcludeFilter(query, 6, "Filter\\(", 60);
   }
 
   @Test // Parquet: partition filter on subdirectory only plus non-partition filter
   public void testPartitionFilter9_Parquet() throws Exception {
-    String query = String.format("select * from dfs_test.`%s/multilevel/parquet` where dir1 in ('Q1','Q4') and o_totalprice < 40000", TEST_RES_PATH);
+    String query = "select * from dfs.`multilevel/parquet` where dir1 in ('Q1','Q4') and o_totalprice < 40000";
     // Parquet RG filter pushdown further reduces to 4 files.
-    testIncludeFilter(query, 4, "Filter", 9);
+    testIncludeFilter(query, 4, "Filter\\(", 9);
   }
 
   @Test
   public void testPartitionFilter9_Parquet_from_CTAS() throws Exception {
-    String query = String.format("select * from dfs_test.tmp.parquet where qrtr in ('Q1','Q4') and o_totalprice < 40000", TEST_RES_PATH);
+    String query = "select * from dfs.tmp.parquet where qrtr in ('Q1','Q4') and o_totalprice < 40000";
     // Parquet RG filter pushdown further reduces to 4 files.
-    testIncludeFilter(query, 4, "Filter", 9);
+    testIncludeFilter(query, 4, "Filter\\(", 9);
   }
 
   @Test
   public void testPartitoinFilter10_Parquet() throws Exception {
-    String query = String.format("select max(o_orderprice) from dfs_test.`%s/multilevel/parquet` where dir0=1994 and dir1='Q1'", TEST_RES_PATH);
-    testExcludeFilter(query, 1, "Filter", 1);
+    String query = "select max(o_orderprice) from dfs.`multilevel/parquet` where dir0=1994 and dir1='Q1'";
+    testExcludeFilter(query, 1, "Filter\\(", 1);
   }
 
   @Test
   public void testPartitoinFilter10_Parquet_from_CTAS() throws Exception {
-    String query = String.format("select max(o_orderprice) from dfs_test.tmp.parquet where yr=1994 and qrtr='Q1'", TEST_RES_PATH);
-    testExcludeFilter(query, 1, "Filter", 1);
+    String query = "select max(o_orderprice) from dfs.tmp.parquet where yr=1994 and qrtr='Q1'";
+    testExcludeFilter(query, 1, "Filter\\(", 1);
   }
 
   @Test // see DRILL-2712
   @Category(UnlikelyTest.class)
   public void testMainQueryFalseCondition() throws Exception {
-    String root = FileUtils.getResourceAsFile("/multilevel/parquet").toURI().toString();
-    String query = String.format("select * from (select dir0, o_custkey from dfs_test.`%s` where dir0='1994') t where 1 = 0", root);
+    String query = "select * from (select dir0, o_custkey from dfs.`multilevel/parquet` where dir0='1994') t where 1 = 0";
     // the 1 = 0 becomes limit 0, which will require to read only one parquet file, in stead of 4 for year '1994'.
-    testExcludeFilter(query, 1, "Filter", 0);
+    testExcludeFilter(query, 1, "Filter\\(", 0);
   }
 
   @Test // see DRILL-2712
   @Category(UnlikelyTest.class)
   public void testMainQueryTrueCondition() throws Exception {
-    String root = FileUtils.getResourceAsFile("/multilevel/parquet").toURI().toString();
-    String query =  String.format("select * from (select dir0, o_custkey from dfs_test.`%s` where dir0='1994' ) t where 0 = 0", root);
-    testExcludeFilter(query, 4, "Filter", 40);
+    String query =  "select * from (select dir0, o_custkey from dfs.`multilevel/parquet` where dir0='1994' ) t where 0 = 0";
+    testExcludeFilter(query, 4, "Filter\\(", 40);
   }
 
   @Test // see DRILL-2712
   public void testMainQueryFilterRegularColumn() throws Exception {
-    String root = FileUtils.getResourceAsFile("/multilevel/parquet").toURI().toString();
-    String query =  String.format("select * from (select dir0, o_custkey from dfs_test.`%s` where dir0='1994' and o_custkey = 10) t limit 0", root);
+    String query = "select * from (select dir0, o_custkey from dfs.`multilevel/parquet` where dir0='1994' and o_custkey = 10) t limit 0";
     // with Parquet RG filter pushdown, reduce to 1 file ( o_custkey all > 10).
-    testIncludeFilter(query, 1, "Filter", 0);
+    testIncludeFilter(query, 1, "Filter\\(", 0);
   }
 
   @Test // see DRILL-2852 and DRILL-3591
   @Category(UnlikelyTest.class)
   public void testPartitionFilterWithCast() throws Exception {
-    String root = FileUtils.getResourceAsFile("/multilevel/parquet").toURI().toString();
-    String query = String.format("select myyear, myquarter, o_totalprice from (select cast(dir0 as varchar(10)) as myyear, "
-        + " cast(dir1 as varchar(10)) as myquarter, o_totalprice from dfs_test.`%s`) where myyear = cast('1995' as varchar(10)) "
-        + " and myquarter = cast('Q2' as varchar(10)) and o_totalprice < 40000.0 order by o_totalprice", root);
+    String query = "select myyear, myquarter, o_totalprice from (select cast(dir0 as varchar(10)) as myyear, "
+        + " cast(dir1 as varchar(10)) as myquarter, o_totalprice from dfs.`multilevel/parquet`) where myyear = cast('1995' as varchar(10)) "
+        + " and myquarter = cast('Q2' as varchar(10)) and o_totalprice < 40000.0 order by o_totalprice";
 
-    testIncludeFilter(query, 1, "Filter", 3);
+    testIncludeFilter(query, 1, "Filter\\(", 3);
   }
 
   @Test
   public void testPPWithNestedExpression() throws Exception {
-    String root = FileUtils.getResourceAsFile("/multilevel/parquet").toURI().toString();
-    String query = String.format("select * from dfs_test.`%s` where dir0 not in(1994) and o_orderpriority = '2-HIGH'",
-        root);
-    testIncludeFilter(query, 8, "Filter", 24);
+    String query = "select * from dfs.`multilevel/parquet` where dir0 not in(1994) and o_orderpriority = '2-HIGH'";
+    testIncludeFilter(query, 8, "Filter\\(", 24);
   }
 
   @Test
   public void testPPWithCase() throws Exception {
-    String root = FileUtils.getResourceAsFile("/multilevel/parquet").toURI().toString();
-    String query = String.format("select 1 from " +
-            "(select  CASE WHEN '07' = '13' THEN '13' ELSE CAST(dir0 as VARCHAR(4)) END as YEAR_FILTER from dfs_test.`%s` where o_orderpriority = '2-HIGH') subq" +
-            " where subq.YEAR_FILTER not in('1994')", root);
-    testIncludeFilter(query, 8, "Filter", 24);
+    String query = "select 1 from (select  CASE WHEN '07' = '13' THEN '13' ELSE CAST(dir0 as VARCHAR(4)) END as YEAR_FILTER from dfs.`multilevel/parquet` " +
+      "where o_orderpriority = '2-HIGH') subq where subq.YEAR_FILTER not in('1994')";
+    testIncludeFilter(query, 8, "Filter\\(", 24);
   }
 
   @Test // DRILL-3702
   @Category(UnlikelyTest.class)
   public void testPartitionFilterWithNonNullabeFilterExpr() throws Exception {
-    String query = String.format("select dir0, dir1, o_custkey, o_orderdate from dfs_test.`%s/multilevel/parquet` where concat(dir0, '') = '1994' and concat(dir1, '') = 'Q1'", TEST_RES_PATH);
-    testExcludeFilter(query, 1, "Filter", 10);
+    String query = "select dir0, dir1, o_custkey, o_orderdate from dfs.`multilevel/parquet` where concat(dir0, '') = '1994' and concat(dir1, '') = 'Q1'";
+    testExcludeFilter(query, 1, "Filter\\(", 10);
   }
 
   @Test // DRILL-2748
   public void testPartitionFilterAfterPushFilterPastAgg() throws Exception {
-    String query = String.format("select dir0, dir1, cnt from (select dir0, dir1, count(*) cnt from dfs_test.`%s/multilevel/parquet` group by dir0, dir1) where dir0 = '1994' and dir1 = 'Q1'", TEST_RES_PATH);
-    testExcludeFilter(query, 1, "Filter", 1);
+    String query = "select dir0, dir1, cnt from (select dir0, dir1, count(*) cnt from dfs.`multilevel/parquet` group by dir0, dir1) where dir0 = '1994' and dir1 = 'Q1'";
+    testExcludeFilter(query, 1, "Filter\\(", 1);
   }
 
   // Coalesce filter is on the non directory column. DRILL-4071
   @Test
   public void testPartitionWithCoalesceFilter_1() throws Exception {
-    String query = String.format("select 1 from dfs_test.`%s/multilevel/parquet` where dir0=1994 and dir1='Q1' and coalesce(o_custkey, 0) = 890", TEST_RES_PATH);
-    testIncludeFilter(query, 1, "Filter", 1);
+    String query = "select 1 from dfs.`multilevel/parquet` where dir0=1994 and dir1='Q1' and coalesce(o_custkey, 0) = 890";
+    testIncludeFilter(query, 1, "Filter\\(", 1);
   }
 
   // Coalesce filter is on the directory column
   @Test
   public void testPartitionWithCoalesceFilter_2() throws Exception {
-    String query = String.format("select 1 from dfs_test.`%s/multilevel/parquet` where dir0=1994 and o_custkey = 890 and coalesce(dir1, 'NA') = 'Q1'", TEST_RES_PATH);
-    testIncludeFilter(query, 1, "Filter", 1);
+    String query = "select 1 from dfs.`multilevel/parquet` where dir0=1994 and o_custkey = 890 and coalesce(dir1, 'NA') = 'Q1'";
+    testIncludeFilter(query, 1, "Filter\\(", 1);
   }
 
   @Test  //DRILL-4021: Json with complex type and nested flatten functions: dir0 and dir1 filters plus filter involves filter refering to output from nested flatten functions.
@@ -346,70 +338,68 @@ public class TestPartitionFilter extends PlanTestBase {
   public void testPartitionFilter_Json_WithFlatten() throws Exception {
     // this query expects to have the partition filter pushded.
     // With partition pruning, we will end with one file, and one row returned from the query.
-    final String query = String.format(
+    final String query =
         " select dir0, dir1, o_custkey, o_orderdate, provider from " +
         "   ( select dir0, dir1, o_custkey, o_orderdate, flatten(items['providers']) as provider " +
             " from (" +
             "   select dir0, dir1, o_custkey, o_orderdate, flatten(o_items) items " +
-            "     from dfs_test.`%s/multilevel/jsoncomplex`) ) " +
+            "     from dfs.`multilevel/jsoncomplex`) ) " +
             " where dir0=1995 " +   // should be pushed down and used as partitioning filter
             "   and dir1='Q1' " +   // should be pushed down and used as partitioning filter
-            "   and provider = 'BestBuy'", // should NOT be pushed down.
-        TEST_RES_PATH);
+            "   and provider = 'BestBuy'"; // should NOT be pushed down.
 
-    testIncludeFilter(query, 1, "Filter", 1);
+    testIncludeFilter(query, 1, "Filter\\(", 1);
   }
 
   @Test
   public void testLogicalDirPruning() throws Exception {
     // 1995/Q1 contains one valid parquet, while 1996/Q1 contains bad format parquet.
     // If dir pruning happens in logical, the query will run fine, since the bad parquet has been pruned before we build ParquetGroupScan.
-    String query = String.format("select dir0, o_custkey from dfs_test.`%s/multilevel/parquetWithBadFormat` where dir0=1995", TEST_RES_PATH);
-    testExcludeFilter(query, 1, "Filter", 10);
+    String query = "select dir0, o_custkey from dfs.`multilevel/parquetWithBadFormat` where dir0=1995";
+    testExcludeFilter(query, 1, "Filter\\(", 10);
   }
 
   @Test
   public void testLogicalDirPruning2() throws Exception {
     // 1995/Q1 contains one valid parquet, while 1996/Q1 contains bad format parquet.
     // If dir pruning happens in logical, the query will run fine, since the bad parquet has been pruned before we build ParquetGroupScan.
-    String query = String.format("select dir0, o_custkey from dfs_test.`%s/multilevel/parquetWithBadFormat` where dir0=1995 and o_custkey > 0", TEST_RES_PATH);
-    testIncludeFilter(query, 1, "Filter", 10);
+    String query = "select dir0, o_custkey from dfs.`multilevel/parquetWithBadFormat` where dir0=1995 and o_custkey > 0";
+    testIncludeFilter(query, 1, "Filter\\(", 10);
   }
 
   @Test  //DRILL-4665: Partition pruning should occur when LIKE predicate on non-partitioning column
   public void testPartitionFilterWithLike() throws Exception {
     // Also should be insensitive to the order of the predicates
-    String query1 = "select yr, qrtr from dfs_test.tmp.parquet where yr=1994 and o_custkey LIKE '%5%'";
-    String query2 = "select yr, qrtr from dfs_test.tmp.parquet where o_custkey LIKE '%5%' and yr=1994";
-    testIncludeFilter(query1, 4, "Filter", 9);
-    testIncludeFilter(query2, 4, "Filter", 9);
+    String query1 = "select yr, qrtr from dfs.tmp.parquet where yr=1994 and o_custkey LIKE '%5%'";
+    String query2 = "select yr, qrtr from dfs.tmp.parquet where o_custkey LIKE '%5%' and yr=1994";
+    testIncludeFilter(query1, 4, "Filter\\(", 9);
+    testIncludeFilter(query2, 4, "Filter\\(", 9);
     // Test when LIKE predicate on partitioning column
-    String query3 = "select yr, qrtr from dfs_test.tmp.parquet where yr LIKE '%1995%' and o_custkey LIKE '%3%'";
-    String query4 = "select yr, qrtr from dfs_test.tmp.parquet where o_custkey LIKE '%3%' and yr LIKE '%1995%'";
-    testIncludeFilter(query3, 4, "Filter", 16);
-    testIncludeFilter(query4, 4, "Filter", 16);
+    String query3 = "select yr, qrtr from dfs.tmp.parquet where yr LIKE '%1995%' and o_custkey LIKE '%3%'";
+    String query4 = "select yr, qrtr from dfs.tmp.parquet where o_custkey LIKE '%3%' and yr LIKE '%1995%'";
+    testIncludeFilter(query3, 4, "Filter\\(", 16);
+    testIncludeFilter(query4, 4, "Filter\\(", 16);
   }
 
   @Test //DRILL-3710 Partition pruning should occur with varying IN-LIST size
   public void testPartitionFilterWithInSubquery() throws Exception {
-    String query = String.format("select * from dfs_test.`%s/multilevel/parquet` where cast (dir0 as int) IN (1994, 1994, 1994, 1994, 1994, 1994)", TEST_RES_PATH);
+    String query = "select * from dfs.`multilevel/parquet` where cast (dir0 as int) IN (1994, 1994, 1994, 1994, 1994, 1994)";
     /* In list size exceeds threshold - no partition pruning since predicate converted to join */
     test("alter session set `planner.in_subquery_threshold` = 2");
-    testExcludeFilter(query, 12, "Filter", 40);
+    testExcludeFilter(query, 12, "Filter\\(", 40);
     /* In list size does not exceed threshold - partition pruning */
     test("alter session set `planner.in_subquery_threshold` = 10");
-    testExcludeFilter(query, 4, "Filter", 40);
+    testExcludeFilter(query, 4, "Filter\\(", 40);
   }
 
 
   @Test // DRILL-4825: querying same table with different filter in UNION ALL.
   public void testPruneSameTableInUnionAll() throws Exception {
-    final String query = String.format("select count(*) as cnt from "
-        + "( select dir0 from dfs_test.`%s/multilevel/parquet` where dir0 in ('1994') union all "
-        + "  select dir0 from dfs_test.`%s/multilevel/parquet` where dir0 in ('1995', '1996') )",
-        TEST_RES_PATH, TEST_RES_PATH);
+    final String query = "select count(*) as cnt from "
+        + "( select dir0 from dfs.`multilevel/parquet` where dir0 in ('1994') union all "
+        + "  select dir0 from dfs.`multilevel/parquet` where dir0 in ('1995', '1996') )";
 
-    String [] excluded = {"Filter"};
+    String [] excluded = {"Filter\\("};
 
     // verify plan that filter is applied in partition pruning.
     testPlanMatchingPatterns(query, null, excluded);
@@ -426,13 +416,12 @@ public class TestPartitionFilter extends PlanTestBase {
 
   @Test // DRILL-4825: querying same table with different filter in Join.
   public void testPruneSameTableInJoin() throws Exception {
-    final String query = String.format("select *  from "
-            + "( select sum(o_custkey) as x from dfs_test.`%s/multilevel/parquet` where dir0 in ('1994') ) join "
-            + " ( select sum(o_custkey) as y from dfs_test.`%s/multilevel/parquet` where dir0 in ('1995', '1996')) "
-            + " on x = y ",
-        TEST_RES_PATH, TEST_RES_PATH);
+    final String query = "select *  from "
+            + "( select sum(o_custkey) as x from dfs.`multilevel/parquet` where dir0 in ('1994') ) join "
+            + " ( select sum(o_custkey) as y from dfs.`multilevel/parquet` where dir0 in ('1995', '1996')) "
+            + " on x = y ";
 
-    String [] excluded = {"Filter"};
+    String [] excluded = {"Filter\\("};
     // verify plan that filter is applied in partition pruning.
     testPlanMatchingPatterns(query, null, excluded);
 
@@ -444,5 +433,4 @@ public class TestPartitionFilter extends PlanTestBase {
         .run();
 
   }
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java b/exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java
index fe7bc08..41d1227 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestProjectPushDown.java
@@ -19,10 +19,13 @@
 package org.apache.drill;
 
 import org.apache.drill.categories.PlannerTest;
+import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.nio.file.Paths;
+
 // Test the optimizer plan in terms of project pushdown.
 // When a query refers to a subset of columns in a table, optimizer should push the list
 // of referred columns to the SCAN operator, so that SCAN operator would only retrieve
@@ -30,8 +33,11 @@ import org.junit.experimental.categories.Category;
 
 @Category(PlannerTest.class)
 public class TestProjectPushDown extends PlanTestBase {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory
-      .getLogger(TestProjectPushDown.class);
+  @BeforeClass
+  public static void setupTestFiles() {
+    dirTestWatcher.copyFileToRoot(Paths.get("sample-data", "nation.parquet"));
+    dirTestWatcher.copyFileToRoot(Paths.get("sample-data", "region.parquet"));
+  }
 
   @Test
   @Ignore
@@ -76,11 +82,11 @@ public class TestProjectPushDown extends PlanTestBase {
     String expectedColNames1 = "\"columns\" : [ \"`N_REGIONKEY`\", \"`N_NAME`\" ]";
     String expectedColNames2 = "\"columns\" : [ \"`R_REGIONKEY`\", \"`R_NAME`\" ]";
 
-    testPhysicalPlan("SELECT\n" + "  nations.N_NAME,\n" + "  regions.R_NAME\n"
+    testPhysicalPlan("SELECT nations.N_NAME, regions.R_NAME "
         + "FROM\n"
-        + "  dfs_test.`[WORKING_PATH]/../../sample-data/nation.parquet` nations\n"
+        + "  dfs.`sample-data/nation.parquet` nations\n"
         + "JOIN\n"
-        + "  dfs_test.`[WORKING_PATH]/../../sample-data/region.parquet` regions\n"
+        + "  dfs.`sample-data/region.parquet` regions\n"
         + "  on nations.N_REGIONKEY = regions.R_REGIONKEY", expectedColNames1,
         expectedColNames2);
   }
@@ -268,10 +274,6 @@ public class TestProjectPushDown extends PlanTestBase {
 
   @Test
   public void testSimpleProjectPastJoinPastFilterPastJoinPushDown() throws Exception {
-//    String sql = "select * " +
-//        "from cp.`%s` t0, cp.`%s` t1, cp.`%s` t2 " +
-//        "where t0.fname = t1.sname and t1.slastname = t2.tlastname and t0.fcolumns[0] + t1.scolumns = 100";
-
     final String firstExpected = "\"columns\" : [ \"`a`\", \"`fa`\", \"`fcolumns`[0]\" ],";
     final String secondExpected = "\"columns\" : [ \"`a`\", \"`b`\", \"`c`\", \"`sa`\" ],";
     final String thirdExpected = "\"columns\" : [ \"`d`\", \"`ta`\" ],";

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestSchemaChange.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestSchemaChange.java b/exec/java-exec/src/test/java/org/apache/drill/TestSchemaChange.java
index 4465e79..f404db4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestSchemaChange.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestSchemaChange.java
@@ -17,21 +17,24 @@
  */
 package org.apache.drill;
 
-import org.apache.drill.common.util.TestTools;
+import org.apache.drill.test.BaseTestQuery;
+import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 
-public class TestSchemaChange extends BaseTestQuery {
+import java.nio.file.Paths;
 
-  protected static final String WORKING_PATH = TestTools.getWorkingPath();
-  protected static final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources";
+public class TestSchemaChange extends BaseTestQuery {
+  @BeforeClass
+  public static void setupFiles() {
+    dirTestWatcher.copyResourceToRoot(Paths.get("schemachange"));
+  }
 
   @Test //DRILL-1605
   @Ignore("Until DRILL-2171 is fixed")
   public void testMultiFilesWithDifferentSchema() throws Exception {
-    final String query = String.format("select a, b from dfs_test.`%s/schemachange/multi/*.json`", TEST_RES_PATH);
     testBuilder()
-        .sqlQuery(query)
+        .sqlQuery("select a, b from dfs.`schemachange/multi/*.json`")
         .ordered()
         .baselineColumns("a", "b")
         .baselineValues(1L, null)

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestSelectWithOption.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestSelectWithOption.java b/exec/java-exec/src/test/java/org/apache/drill/TestSelectWithOption.java
index 80565a1..5a49ad2 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestSelectWithOption.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestSelectWithOption.java
@@ -18,7 +18,7 @@
 package org.apache.drill;
 
 import static java.lang.String.format;
-import static org.apache.drill.TestBuilder.listOf;
+import static org.apache.drill.test.TestBuilder.listOf;
 
 import java.io.File;
 import java.io.FileWriter;
@@ -26,6 +26,8 @@ import java.io.IOException;
 
 import org.apache.drill.categories.SqlTest;
 import org.apache.drill.exec.store.dfs.WorkspaceSchemaFactory;
+import org.apache.drill.test.BaseTestQuery;
+import org.apache.drill.test.TestBuilder;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -34,7 +36,7 @@ public class TestSelectWithOption extends BaseTestQuery {
   private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(WorkspaceSchemaFactory.class);
 
   private File genCSVFile(String name, String... rows) throws IOException {
-    File file = new File(format("target/%s_%s.csv", this.getClass().getName(), name));
+    File file = new File(format("%s/%s.csv", dirTestWatcher.getRootDir(), name));
     try (FileWriter fw = new FileWriter(file)) {
       for (int i = 0; i < rows.length; i++) {
         fw.append(rows[i] + "\n");
@@ -45,7 +47,7 @@ public class TestSelectWithOption extends BaseTestQuery {
 
   private String genCSVTable(String name, String... rows) throws IOException {
     File f = genCSVFile(name, rows);
-    return format("dfs.`${WORKING_PATH}/%s`", f.getPath());
+    return format("dfs.`%s`", f.getName());
   }
 
   private void testWithResult(String query, Object... expectedResult) throws Exception {
@@ -245,9 +247,6 @@ public class TestSelectWithOption extends BaseTestQuery {
     String[] jsonQueries = {
         format("select columns from table(%s ('JSON'))", jsonTableName),
         format("select columns from table(%s(type => 'JSON'))", jsonTableName),
-//        format("select columns from %s ('JSON')", jsonTableName),
-//        format("select columns from %s (type => 'JSON')", jsonTableName),
-//        format("select columns from %s(type => 'JSON')", jsonTableName),
         // we can use named format plugin configurations too!
         format("select columns from table(%s(type => 'Named', name => 'json'))", jsonTableName),
     };
@@ -260,7 +259,7 @@ public class TestSelectWithOption extends BaseTestQuery {
   public void testUse() throws Exception {
     File f = genCSVFile("testUse",
         "{\"columns\": [\"f\",\"g\"]}");
-    String jsonTableName = format("`${WORKING_PATH}/%s`", f.getPath());
+    String jsonTableName = String.format("dfs.`%s`", f.getName());
     // the extension is actually csv
     test("use dfs");
     try {

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestSelectivity.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestSelectivity.java b/exec/java-exec/src/test/java/org/apache/drill/TestSelectivity.java
index 0c41ae0..de49a5f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestSelectivity.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestSelectivity.java
@@ -18,6 +18,7 @@
 package org.apache.drill;
 
 import org.apache.drill.categories.PlannerTest;
+import org.apache.drill.test.BaseTestQuery;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
index b4ac11f..34c8c6c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestStarQueries.java
@@ -22,20 +22,25 @@ import org.apache.drill.categories.SqlTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.types.TypeProtos;
-import org.apache.drill.common.util.FileUtils;
-import org.apache.drill.common.util.TestTools;
 import org.apache.drill.exec.record.BatchSchema;
 import org.apache.drill.test.rowSet.SchemaBuilder;
+import org.apache.drill.test.BaseTestQuery;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.nio.file.Paths;
+
 import static org.junit.Assert.assertEquals;
 
 @Category({SqlTest.class, PlannerTest.class})
-public class TestStarQueries extends BaseTestQuery{
+public class TestStarQueries extends BaseTestQuery {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestStarQueries.class);
-  static final String WORKING_PATH = TestTools.getWorkingPath();
-  static final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources";
+
+  @BeforeClass
+  public static void setupTestFiles() {
+    dirTestWatcher.copyResourceToRoot(Paths.get("multilevel", "parquet"));
+  }
 
   @Test // see DRILL-2021
   @Category(UnlikelyTest.class)
@@ -248,7 +253,7 @@ public class TestStarQueries extends BaseTestQuery{
   @Test // DRILL-1293
   @Category(UnlikelyTest.class)
   public void testStarView1() throws Exception {
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
     test("create view vt1 as select * from cp.`tpch/region.parquet` r, cp.`tpch/nation.parquet` n where r.r_regionkey = n.n_regionkey");
     test("select * from vt1");
     test("drop view vt1");
@@ -416,11 +421,10 @@ public class TestStarQueries extends BaseTestQuery{
   @Test // DRILL-1500
   @Category(UnlikelyTest.class)
   public void testStarPartitionFilterOrderBy() throws Exception {
-    String query = String.format("select * from dfs_test.`%s/multilevel/parquet` where dir0=1994 and dir1='Q1' order by dir0 limit 1", TEST_RES_PATH);
     org.joda.time.DateTime mydate = new org.joda.time.DateTime("1994-01-20T00:00:00.000");
 
     testBuilder()
-    .sqlQuery(query)
+    .sqlQuery("select * from dfs.`multilevel/parquet` where dir0=1994 and dir1='Q1' order by dir0 limit 1")
     .ordered()
     .baselineColumns("dir0", "dir1", "o_clerk", "o_comment", "o_custkey", "o_orderdate", "o_orderkey",  "o_orderpriority", "o_orderstatus", "o_shippriority",  "o_totalprice")
     .baselineValues("1994", "Q1", "Clerk#000000743", "y pending requests integrate", 1292, mydate, 66, "5-LOW", "F",  0, 104190.66)
@@ -481,19 +485,13 @@ public class TestStarQueries extends BaseTestQuery{
   @Test //DRILL-2802
   @Category(UnlikelyTest.class)
   public void testSelectPartitionColumnOnly() throws Exception {
-    final String table = FileUtils.getResourceAsFile("/multilevel/parquet").toURI().toString();
-    final String query1 = String.format("select dir0 from dfs_test.`%s` limit 1 ", table);
-
     final String[] expectedPlan1 = {".*Project.*dir0=\\[\\$0\\]"};
     final String[] excludedPlan1 = {};
-    PlanTestBase.testPlanMatchingPatterns(query1, expectedPlan1, excludedPlan1);
-
-    final String query2 = String.format("select dir0, dir1 from dfs_test.`%s` limit 1 ", table);
+    PlanTestBase.testPlanMatchingPatterns("select dir0 from dfs.`multilevel/parquet` limit 1", expectedPlan1, excludedPlan1);
 
     final String[] expectedPlan2 = {".*Project.*dir0=\\[\\$0\\], dir1=\\[\\$1\\]"};
     final String[] excludedPlan2 = {};
-    PlanTestBase.testPlanMatchingPatterns(query2, expectedPlan2, excludedPlan2);
-
+    PlanTestBase.testPlanMatchingPatterns("select dir0, dir1 from dfs.`multilevel/parquet` limit 1", expectedPlan2, excludedPlan2);
   }
 
   @Test   // DRILL-2053 : column name is case-insensitive when joining a CTE with a regular table.

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestTextJoin.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTextJoin.java b/exec/java-exec/src/test/java/org/apache/drill/TestTextJoin.java
index 7fd9c8e..d427a3d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTextJoin.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTextJoin.java
@@ -17,29 +17,22 @@
  */
 package org.apache.drill;
 
-import org.apache.drill.common.util.TestTools;
+import org.apache.drill.test.BaseTestQuery;
 import org.junit.Test;
 
-public class TestTextJoin extends BaseTestQuery{
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestTextJoin.class);
-
-  static final String WORKING_PATH = TestTools.getWorkingPath();
-  static final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources";
-
+public class TestTextJoin extends BaseTestQuery {
   @Test
   public void testTextJoin1() throws Exception {
-    String query1 = String.format("select r.columns[0] as v, r.columns[1] as w, r.columns[2] as x, u.columns[0] as y, t.columns[0] as z from dfs_test.`%s/uservisits/rankings.tbl` r, "
-        + " dfs_test.`%s/uservisits/uservisits.tbl` u, dfs_test.`%s/uservisits/temp1.tbl` t "
-        + " where r.columns[1]=u.columns[1] and r.columns[1] = t.columns[1]", TEST_RES_PATH, TEST_RES_PATH, TEST_RES_PATH);
-    test(query1);
+    test("select r.columns[0] as v, r.columns[1] as w, r.columns[2] as x, u.columns[0] as y, t.columns[0] as z "
+      + " from cp.`uservisits/rankings.tbl` r, "
+      + " cp.`uservisits/uservisits.tbl` u, cp.`uservisits/temp1.tbl` t "
+      + " where r.columns[1]=u.columns[1] and r.columns[1] = t.columns[1]");
   }
 
   @Test
   public void testTextJoin2() throws Exception {
-    String query1 = String.format("select r.columns[0] as v, r.columns[1] as w, r.columns[2] as x, u.columns[0] as y "
-        + " from dfs_test.`%s/uservisits/rankings.tbl` r, dfs_test.`%s/uservisits/uservisits.tbl` u "
-        + " where r.columns[1]=u.columns[1] and r.columns[0] < 50", TEST_RES_PATH, TEST_RES_PATH);
-    test(query1);
+    test("select r.columns[0] as v, r.columns[1] as w, r.columns[2] as x, u.columns[0] as y "
+      + " from cp.`uservisits/rankings.tbl` r, cp.`uservisits/uservisits.tbl` u "
+      + " where r.columns[1]=u.columns[1] and r.columns[0] < 50");
   }
-
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributed.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributed.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributed.java
index 5164de5..88a2311 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributed.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributed.java
@@ -18,6 +18,7 @@
 package org.apache.drill;
 
 import org.apache.drill.categories.SlowTest;
+import org.apache.drill.test.BaseTestQuery;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedConcurrent.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedConcurrent.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedConcurrent.java
index 07b9eb6..917890b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedConcurrent.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedConcurrent.java
@@ -26,10 +26,12 @@ import java.util.concurrent.Semaphore;
 
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.common.exceptions.UserException;
-import org.apache.drill.common.util.TestTools;
+import org.apache.drill.test.TestTools;
 import org.apache.drill.exec.proto.UserBitShared;
 import org.apache.drill.exec.proto.UserBitShared.QueryResult.QueryState;
 import org.apache.drill.exec.rpc.user.UserResultsListener;
+import org.apache.drill.test.BaseTestQuery;
+import org.apache.drill.test.QueryTestUtil;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedStreaming.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedStreaming.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedStreaming.java
index 94433f4..2a8ccc3 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedStreaming.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchDistributedStreaming.java
@@ -17,10 +17,11 @@
  */
 package org.apache.drill;
 
+import org.apache.drill.test.BaseTestQuery;
 import org.junit.Ignore;
 import org.junit.Test;
 
-public class TestTpchDistributedStreaming extends BaseTestQuery{
+public class TestTpchDistributedStreaming extends BaseTestQuery {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestTpchDistributedStreaming.class);
 
   private void testDistributed(String fileName) throws Exception{

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestTpchExplain.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchExplain.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchExplain.java
index 1fc437c..204684b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTpchExplain.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchExplain.java
@@ -18,12 +18,13 @@
 package org.apache.drill;
 
 import org.apache.drill.categories.PlannerTest;
+import org.apache.drill.test.BaseTestQuery;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 @Category(PlannerTest.class)
-public class TestTpchExplain extends BaseTestQuery{
+public class TestTpchExplain extends BaseTestQuery {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestTpchExplain.class);
 
   private static final String EXPLAIN_PREFIX = "EXPLAIN PLAN FOR ";

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestTpchLimit0.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchLimit0.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchLimit0.java
index 68ebdda..940b4af 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTpchLimit0.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchLimit0.java
@@ -18,12 +18,13 @@
 package org.apache.drill;
 
 import org.apache.drill.categories.PlannerTest;
+import org.apache.drill.test.BaseTestQuery;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 @Category(PlannerTest.class)
-public class TestTpchLimit0 extends BaseTestQuery{
+public class TestTpchLimit0 extends BaseTestQuery {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestTpchLimit0.class);
 
   private void testLimitZero(String fileName) throws Exception {

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestTpchPlanning.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchPlanning.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchPlanning.java
index 1c93a07..8299e6c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTpchPlanning.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchPlanning.java
@@ -18,7 +18,7 @@
 package org.apache.drill;
 
 import org.apache.drill.categories.PlannerTest;
-import org.apache.drill.common.util.TestTools;
+import org.apache.drill.test.TestTools;
 import org.junit.Ignore;
 import org.junit.Rule;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestTpchSingleMode.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestTpchSingleMode.java b/exec/java-exec/src/test/java/org/apache/drill/TestTpchSingleMode.java
index abefb16..a76ec8a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestTpchSingleMode.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestTpchSingleMode.java
@@ -17,10 +17,11 @@
  */
 package org.apache.drill;
 
+import org.apache.drill.test.BaseTestQuery;
 import org.junit.Ignore;
 import org.junit.Test;
 
-public class TestTpchSingleMode extends BaseTestQuery{
+public class TestTpchSingleMode extends BaseTestQuery {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestTpchSingleMode.class);
 
   private static final String SINGLE_MODE = "ALTER SESSION SET `planner.disable_exchanges` = true;";

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java b/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java
index 831b70c..655d036 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestUnionAll.java
@@ -26,16 +26,17 @@ import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos;
-import org.apache.drill.common.util.FileUtils;
 import org.apache.drill.exec.work.foreman.SqlUnsupportedException;
 import org.apache.drill.exec.work.foreman.UnsupportedRelOperatorException;
-import org.junit.Ignore;
+import org.apache.drill.test.BaseTestQuery;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileWriter;
+import java.nio.file.Paths;
 import java.util.List;
 
 @Category({SqlTest.class, OperatorTest.class})
@@ -46,6 +47,11 @@ public class TestUnionAll extends BaseTestQuery {
   private static final String enableDistribute = "alter session set `planner.enable_unionall_distribute` = true";
   private static final String defaultDistribute = "alter session reset `planner.enable_unionall_distribute`";
 
+  @BeforeClass
+  public static void setupTestFiles() {
+    dirTestWatcher.copyResourceToRoot(Paths.get("multilevel", "parquet"));
+  }
+
   @Test  // Simple Union-All over two scans
   public void testUnionAll1() throws Exception {
     String query = "(select n_regionkey from cp.`tpch/nation.parquet`) union all (select r_regionkey from cp.`tpch/region.parquet`)";
@@ -171,13 +177,12 @@ public class TestUnionAll extends BaseTestQuery {
   @Test // DRILL-1905: Union-all of * column from JSON files in different directories
   @Category(UnlikelyTest.class)
   public void testUnionAll9() throws Exception {
-    String file0 = FileUtils.getResourceAsFile("/multilevel/json/1994/Q1/orders_94_q1.json").toURI().toString();
-    String file1 = FileUtils.getResourceAsFile("/multilevel/json/1995/Q1/orders_95_q1.json").toURI().toString();
-    String query = String.format("select o_custkey, o_orderstatus, o_totalprice, o_orderdate, o_orderpriority, o_clerk, o_shippriority, o_comment, o_orderkey from dfs_test.`%s` union all " +
-                                 "select o_custkey, o_orderstatus, o_totalprice, o_orderdate, o_orderpriority, o_clerk, o_shippriority, o_comment, o_orderkey from dfs_test.`%s`", file0, file1);
+    String file0 = "/multilevel/json/1994/Q1/orders_94_q1.json";
+    String file1 = "/multilevel/json/1995/Q1/orders_95_q1.json";
 
     testBuilder()
-        .sqlQuery(query)
+        .sqlQuery("select o_custkey, o_orderstatus, o_totalprice, o_orderdate, o_orderpriority, o_clerk, o_shippriority, o_comment, o_orderkey from cp.`%s` union " +
+          "all select o_custkey, o_orderstatus, o_totalprice, o_orderdate, o_orderpriority, o_clerk, o_shippriority, o_comment, o_orderkey from cp.`%s`", file0, file1)
         .unOrdered()
         .csvBaselineFile("testframework/testUnionAllQueries/q9.tsv")
         .baselineTypes(TypeProtos.MinorType.BIGINT, TypeProtos.MinorType.VARCHAR, TypeProtos.MinorType.FLOAT8, TypeProtos.MinorType.VARCHAR,
@@ -204,17 +209,17 @@ public class TestUnionAll extends BaseTestQuery {
 
   @Test
   public void testUnionAllViewExpandableStar() throws Exception {
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
     test("create view nation_view_testunionall as select n_name, n_nationkey from cp.`tpch/nation.parquet`;");
     test("create view region_view_testunionall as select r_name, r_regionkey from cp.`tpch/region.parquet`;");
 
-    String query1 = "(select * from dfs_test.tmp.`nation_view_testunionall`) " +
+    String query1 = "(select * from dfs.tmp.`nation_view_testunionall`) " +
                     "union all " +
-                    "(select * from dfs_test.tmp.`region_view_testunionall`) ";
+                    "(select * from dfs.tmp.`region_view_testunionall`) ";
 
     String query2 =  "(select r_name, r_regionkey from cp.`tpch/region.parquet`) " +
                      "union all " +
-                     "(select * from dfs_test.tmp.`nation_view_testunionall`)";
+                     "(select * from dfs.tmp.`nation_view_testunionall`)";
 
     try {
       testBuilder()
@@ -240,11 +245,11 @@ public class TestUnionAll extends BaseTestQuery {
 
   @Test(expected = UnsupportedRelOperatorException.class) // see DRILL-2002
   public void testUnionAllViewUnExpandableStar() throws Exception {
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
     test("create view nation_view_testunionall as select * from cp.`tpch/nation.parquet`;");
 
     try {
-      String query = "(select * from dfs_test.tmp.`nation_view_testunionall`) " +
+      String query = "(select * from dfs.tmp.`nation_view_testunionall`) " +
                      "union all (select * from cp.`tpch/region.parquet`)";
       test(query);
     } catch(UserException ex) {
@@ -257,7 +262,7 @@ public class TestUnionAll extends BaseTestQuery {
 
   @Test
   public void testDiffDataTypesAndModes() throws Exception {
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
     test("create view nation_view_testunionall as select n_name, n_nationkey from cp.`tpch/nation.parquet`;");
     test("create view region_view_testunionall as select r_name, r_regionkey from cp.`tpch/region.parquet`;");
 
@@ -348,23 +353,13 @@ public class TestUnionAll extends BaseTestQuery {
   @Test // see DRILL-1977, DRILL-2376, DRILL-2377, DRILL-2378, DRILL-2379
   @Category(UnlikelyTest.class)
   public void testAggregationOnUnionAllOperator() throws Exception {
-    String root = FileUtils.getResourceAsFile("/store/text/data/t.json").toURI().toString();
-    String query1 = String.format(
-            "(select calc1, max(b1) as `max`, min(b1) as `min`, count(c1) as `count` " +
-                    "from (select a1 + 10 as calc1, b1, c1 from dfs_test.`%s` " +
-                    "union all " +
-                    "select a1 + 100 as diff1, b1 as diff2, c1 as diff3 from dfs_test.`%s`) " +
-                    "group by calc1 order by calc1)", root, root);
-
-    String query2 = String.format(
-        "(select calc1, min(b1) as `min`, max(b1) as `max`, count(c1) as `count` " +
-        "from (select a1 + 10 as calc1, b1, c1 from dfs_test.`%s` " +
-        "union all " +
-        "select a1 + 100 as diff1, b1 as diff2, c1 as diff3 from dfs_test.`%s`) " +
-        "group by calc1 order by calc1)", root, root);
+    String root = "/store/text/data/t.json";
 
     testBuilder()
-        .sqlQuery(query1)
+        .sqlQuery("(select calc1, max(b1) as `max`, min(b1) as `min`, count(c1) as `count` " +
+          "from (select a1 + 10 as calc1, b1, c1 from cp.`%s` " +
+          "union all select a1 + 100 as diff1, b1 as diff2, c1 as diff3 from cp.`%s`) " +
+          "group by calc1 order by calc1)", root, root)
         .ordered()
         .csvBaselineFile("testframework/testExampleQueries/testAggregationOnUnionAllOperator/q1.tsv")
         .baselineTypes(TypeProtos.MinorType.BIGINT, TypeProtos.MinorType.BIGINT, TypeProtos.MinorType.BIGINT, TypeProtos.MinorType.BIGINT)
@@ -372,7 +367,10 @@ public class TestUnionAll extends BaseTestQuery {
         .build().run();
 
     testBuilder()
-        .sqlQuery(query2)
+        .sqlQuery("(select calc1, min(b1) as `min`, max(b1) as `max`, count(c1) as `count` " +
+          "from (select a1 + 10 as calc1, b1, c1 from cp.`%s` " +
+          "union all select a1 + 100 as diff1, b1 as diff2, c1 as diff3 from cp.`%s`) " +
+          "group by calc1 order by calc1)", root, root)
         .ordered()
         .csvBaselineFile("testframework/testExampleQueries/testAggregationOnUnionAllOperator/q2.tsv")
         .baselineTypes(TypeProtos.MinorType.BIGINT, TypeProtos.MinorType.BIGINT, TypeProtos.MinorType.BIGINT, TypeProtos.MinorType.BIGINT)
@@ -382,40 +380,23 @@ public class TestUnionAll extends BaseTestQuery {
 
   @Test(expected = UserException.class) // see DRILL-2590
   public void testUnionAllImplicitCastingFailure() throws Exception {
-    String rootInt = FileUtils.getResourceAsFile("/store/json/intData.json").toURI().toString();
-    String rootBoolean = FileUtils.getResourceAsFile("/store/json/booleanData.json").toURI().toString();
+    String rootInt = "/store/json/intData.json";
+    String rootBoolean = "/store/json/booleanData.json";
 
-    String query = String.format(
-        "(select key from dfs_test.`%s` " +
+    test("(select key from cp.`%s` " +
         "union all " +
-        "select key from dfs_test.`%s` )", rootInt, rootBoolean);
-
-    test(query);
+        "select key from cp.`%s` )", rootInt, rootBoolean);
   }
 
   @Test // see DRILL-2591
   @Category(UnlikelyTest.class)
   public void testDateAndTimestampJson() throws Exception {
-    String rootDate = FileUtils.getResourceAsFile("/store/json/dateData.json").toURI().toString();
-    String rootTimpStmp = FileUtils.getResourceAsFile("/store/json/timeStmpData.json").toURI().toString();
-
-    String query1 = String.format(
-        "(select max(key) as key from dfs_test.`%s` " +
-        "union all " +
-        "select key from dfs_test.`%s`)", rootDate, rootTimpStmp);
-
-    String query2 = String.format(
-        "select key from dfs_test.`%s` " +
-        "union all " +
-        "select max(key) as key from dfs_test.`%s`", rootDate, rootTimpStmp);
-
-    String query3 = String.format(
-        "select key from dfs_test.`%s` " +
-        "union all " +
-        "select max(key) as key from dfs_test.`%s`", rootDate, rootTimpStmp);
+    String rootDate = "/store/json/dateData.json";
+    String rootTimpStmp = "/store/json/timeStmpData.json";
 
     testBuilder()
-        .sqlQuery(query1)
+        .sqlQuery("(select max(key) as key from cp.`%s` " +
+          "union all select key from cp.`%s`)", rootDate, rootTimpStmp)
         .unOrdered()
         .csvBaselineFile("testframework/testUnionAllQueries/q18_1.tsv")
         .baselineTypes(TypeProtos.MinorType.VARCHAR)
@@ -423,7 +404,8 @@ public class TestUnionAll extends BaseTestQuery {
         .build().run();
 
     testBuilder()
-        .sqlQuery(query2)
+        .sqlQuery("select key from cp.`%s` " +
+          "union all select max(key) as key from cp.`%s`", rootDate, rootTimpStmp)
         .unOrdered()
         .csvBaselineFile("testframework/testUnionAllQueries/q18_2.tsv")
         .baselineTypes(TypeProtos.MinorType.VARCHAR)
@@ -431,7 +413,8 @@ public class TestUnionAll extends BaseTestQuery {
         .build().run();
 
     testBuilder()
-        .sqlQuery(query3)
+        .sqlQuery("select key from cp.`%s` " +
+          "union all select max(key) as key from cp.`%s`", rootDate, rootTimpStmp)
         .unOrdered()
         .csvBaselineFile("testframework/testUnionAllQueries/q18_3.tsv")
         .baselineTypes(TypeProtos.MinorType.VARCHAR)
@@ -442,21 +425,11 @@ public class TestUnionAll extends BaseTestQuery {
   @Test // see DRILL-2637
   @Category(UnlikelyTest.class)
   public void testUnionAllOneInputContainsAggFunction() throws Exception {
-    String root = FileUtils.getResourceAsFile("/multilevel/csv/1994/Q1/orders_94_q1.csv").toURI().toString();
-    String query1 = String.format("select * from ((select count(c1) as ct from (select columns[0] c1 from dfs.`%s`)) \n" +
-        "union all \n" +
-        "(select columns[0] c2 from dfs.`%s`)) order by ct limit 3", root, root);
-
-    String query2 = String.format("select * from ((select columns[0] ct from dfs.`%s`)\n" +
-        "union all \n" +
-        "(select count(c1) as c2 from (select columns[0] c1 from dfs.`%s`))) order by ct limit 3", root, root);
-
-    String query3 = String.format("select * from ((select count(c1) as ct from (select columns[0] c1 from dfs.`%s`))\n" +
-        "union all \n" +
-        "(select count(c1) as c2 from (select columns[0] c1 from dfs.`%s`))) order by ct", root, root);
+    String root = "/multilevel/csv/1994/Q1/orders_94_q1.csv";
 
     testBuilder()
-        .sqlQuery(query1)
+        .sqlQuery("select * from ((select count(c1) as ct from (select columns[0] c1 from cp.`%s`)) \n" +
+          "union all (select columns[0] c2 from cp.`%s`)) order by ct limit 3", root, root)
         .ordered()
         .baselineColumns("ct")
         .baselineValues((long) 10)
@@ -465,7 +438,8 @@ public class TestUnionAll extends BaseTestQuery {
         .build().run();
 
     testBuilder()
-        .sqlQuery(query2)
+        .sqlQuery("select * from ((select columns[0] ct from cp.`%s`)\n" +
+          "union all (select count(c1) as c2 from (select columns[0] c1 from cp.`%s`))) order by ct limit 3", root, root)
         .ordered()
         .baselineColumns("ct")
         .baselineValues((long) 10)
@@ -474,7 +448,8 @@ public class TestUnionAll extends BaseTestQuery {
         .build().run();
 
     testBuilder()
-        .sqlQuery(query3)
+        .sqlQuery("select * from ((select count(c1) as ct from (select columns[0] c1 from cp.`%s`))\n" +
+          "union all (select count(c1) as c2 from (select columns[0] c1 from cp.`%s`))) order by ct", root, root)
          .ordered()
          .baselineColumns("ct")
          .baselineValues((long) 10)
@@ -485,19 +460,18 @@ public class TestUnionAll extends BaseTestQuery {
   @Test // see DRILL-2717
   @Category(UnlikelyTest.class)
   public void testUnionInputsGroupByOnCSV() throws Exception {
-    String root = FileUtils.getResourceAsFile("/multilevel/csv/1994/Q1/orders_94_q1.csv").toURI().toString();
-    String query = String.format("select * from \n" +
-            "((select columns[0] as col0 from dfs.`%s` t1 \n" +
+    String root = "/multilevel/csv/1994/Q1/orders_94_q1.csv";
+
+    testBuilder()
+        .sqlQuery("select * from \n" +
+            "((select columns[0] as col0 from cp.`%s` t1 \n" +
             "where t1.columns[0] = 66) \n" +
             "union all \n" +
-            "(select columns[0] c2 from dfs.`%s` t2 \n" +
+            "(select columns[0] c2 from cp.`%s` t2 \n" +
             "where t2.columns[0] is not null \n" +
             "group by columns[0])) \n" +
-        "group by col0"
-        , root, root);
-
-    testBuilder()
-        .sqlQuery(query)
+            "group by col0"
+          , root, root)
         .unOrdered()
         .baselineColumns("col0")
         .baselineValues("290")
@@ -516,12 +490,9 @@ public class TestUnionAll extends BaseTestQuery {
   @Test // see DRILL-2639
   @Category(UnlikelyTest.class)
   public void testUnionAllDiffTypesAtPlanning() throws Exception {
-    String query = "select count(c1) as ct from (select cast(r_regionkey as int) c1 from cp.`tpch/region.parquet`) " +
-        "union all " +
-        "(select cast(r_regionkey as int) c2 from cp.`tpch/region.parquet`)";
-
     testBuilder()
-        .sqlQuery(query)
+        .sqlQuery("select count(c1) as ct from (select cast(r_regionkey as int) c1 from cp.`tpch/region.parquet`) " +
+          "union all (select cast(r_regionkey as int) c2 from cp.`tpch/region.parquet`)")
         .ordered()
         .baselineColumns("ct")
         .baselineValues((long) 5)
@@ -536,18 +507,15 @@ public class TestUnionAll extends BaseTestQuery {
   @Test // see DRILL-2612
   @Category(UnlikelyTest.class)
   public void testUnionAllRightEmptyJson() throws Exception {
-    String rootEmpty = FileUtils.getResourceAsFile("/project/pushdown/empty.json").toURI().toString();
-    String rootSimple = FileUtils.getResourceAsFile("/store/json/booleanData.json").toURI().toString();
-
-    String queryRightEmpty = String.format(
-        "select key from dfs_test.`%s` " +
-        "union all " +
-        "select key from dfs_test.`%s`",
-        rootSimple,
-        rootEmpty);
+    String rootEmpty = "/project/pushdown/empty.json";
+    String rootSimple = "/store/json/booleanData.json";
 
     testBuilder()
-      .sqlQuery(queryRightEmpty)
+      .sqlQuery("select key from cp.`%s` " +
+          "union all " +
+          "select key from cp.`%s`",
+        rootSimple,
+        rootEmpty)
       .unOrdered()
       .baselineColumns("key")
       .baselineValues(true)
@@ -557,18 +525,15 @@ public class TestUnionAll extends BaseTestQuery {
 
   @Test
   public void testUnionAllLeftEmptyJson() throws Exception {
-    final String rootEmpty = FileUtils.getResourceAsFile("/project/pushdown/empty.json").toURI().toString();
-    final String rootSimple = FileUtils.getResourceAsFile("/store/json/booleanData.json").toURI().toString();
-
-    final String queryLeftEmpty = String.format(
-        "select key from dfs_test.`%s` " +
-        "union all " +
-        "select key from dfs_test.`%s`",
-        rootEmpty,
-        rootSimple);
+    final String rootEmpty = "/project/pushdown/empty.json";
+    final String rootSimple = "/store/json/booleanData.json";
 
     testBuilder()
-        .sqlQuery(queryLeftEmpty)
+        .sqlQuery("select key from cp.`%s` " +
+            "union all " +
+            "select key from cp.`%s`",
+          rootEmpty,
+          rootSimple)
         .unOrdered()
         .baselineColumns("key")
         .baselineValues(true)
@@ -579,13 +544,7 @@ public class TestUnionAll extends BaseTestQuery {
 
   @Test
   public void testUnionAllBothEmptyJson() throws Exception {
-    final String rootEmpty = FileUtils.getResourceAsFile("/project/pushdown/empty.json").toURI().toString();
-    final String query = String.format(
-        "select key from dfs_test.`%s` " +
-            "union all " +
-            "select key from dfs_test.`%s`",
-        rootEmpty,
-        rootEmpty);
+    final String rootEmpty = "/project/pushdown/empty.json";
 
     final List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema = Lists.newArrayList();
     final TypeProtos.MajorType majorType = TypeProtos.MajorType.newBuilder()
@@ -595,7 +554,11 @@ public class TestUnionAll extends BaseTestQuery {
     expectedSchema.add(Pair.of(SchemaPath.getSimplePath("key"), majorType));
 
     testBuilder()
-        .sqlQuery(query)
+        .sqlQuery("select key from cp.`%s` " +
+            "union all " +
+            "select key from cp.`%s`",
+          rootEmpty,
+          rootEmpty)
         .schemaBaseLine(expectedSchema)
         .build()
         .run();
@@ -603,17 +566,14 @@ public class TestUnionAll extends BaseTestQuery {
 
   @Test
   public void testUnionAllRightEmptyDataBatch() throws Exception {
-    String rootSimple = FileUtils.getResourceAsFile("/store/json/booleanData.json").toURI().toString();
-
-    String queryRightEmptyBatch = String.format(
-        "select key from dfs_test.`%s` " +
-            "union all " +
-            "select key from dfs_test.`%s` where 1 = 0",
-        rootSimple,
-        rootSimple);
+    String rootSimple = "/store/json/booleanData.json";
 
     testBuilder()
-        .sqlQuery(queryRightEmptyBatch)
+        .sqlQuery("select key from cp.`%s` " +
+            "union all " +
+            "select key from cp.`%s` where 1 = 0",
+          rootSimple,
+          rootSimple)
         .unOrdered()
         .baselineColumns("key")
         .baselineValues(true)
@@ -623,17 +583,14 @@ public class TestUnionAll extends BaseTestQuery {
 
   @Test
   public void testUnionAllLeftEmptyDataBatch() throws Exception {
-    String rootSimple = FileUtils.getResourceAsFile("/store/json/booleanData.json").toURI().toString();
-
-    final String queryLeftBatch = String.format(
-        "select key from dfs_test.`%s` where 1 = 0 " +
-            "union all " +
-            "select key from dfs_test.`%s`",
-        rootSimple,
-        rootSimple);
+    String rootSimple = "/store/json/booleanData.json";
 
     testBuilder()
-        .sqlQuery(queryLeftBatch)
+        .sqlQuery("select key from cp.`%s` where 1 = 0 " +
+            "union all " +
+            "select key from cp.`%s`",
+          rootSimple,
+          rootSimple)
         .unOrdered()
         .baselineColumns("key")
         .baselineValues(true)
@@ -644,13 +601,7 @@ public class TestUnionAll extends BaseTestQuery {
 
   @Test
   public void testUnionAllBothEmptyDataBatch() throws Exception {
-    String rootSimple = FileUtils.getResourceAsFile("/store/json/booleanData.json").toURI().toString();
-    final String query = String.format(
-        "select key from dfs_test.`%s` where 1 = 0 " +
-            "union all " +
-            "select key from dfs_test.`%s` where 1 = 0",
-        rootSimple,
-        rootSimple);
+    String rootSimple = "/store/json/booleanData.json";
 
     final List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema = Lists.newArrayList();
     final TypeProtos.MajorType majorType = TypeProtos.MajorType.newBuilder()
@@ -660,7 +611,11 @@ public class TestUnionAll extends BaseTestQuery {
     expectedSchema.add(Pair.of(SchemaPath.getSimplePath("key"), majorType));
 
     testBuilder()
-        .sqlQuery(query)
+        .sqlQuery("select key from cp.`%s` where 1 = 0 " +
+            "union all " +
+            "select key from cp.`%s` where 1 = 0",
+          rootSimple,
+          rootSimple)
         .schemaBaseLine(expectedSchema)
         .build()
         .run();
@@ -733,11 +688,12 @@ public class TestUnionAll extends BaseTestQuery {
 
   @Test // see DRILL-2746
   public void testFilterPushDownOverUnionAllCSV() throws Exception {
-    String root = FileUtils.getResourceAsFile("/multilevel/csv/1994/Q1/orders_94_q1.csv").toURI().toString();
+    String root = "/multilevel/csv/1994/Q1/orders_94_q1.csv";
+
     String query = String.format("select ct \n" +
-        "from ((select count(c1) as ct from (select columns[0] c1 from dfs.`%s`)) \n" +
+        "from ((select count(c1) as ct from (select columns[0] c1 from cp.`%s`)) \n" +
         "union all \n" +
-        "(select columns[0] c2 from dfs.`%s`)) \n" +
+        "(select columns[0] c2 from cp.`%s`)) \n" +
         "where ct < 100", root, root);
 
     // Validate the plan
@@ -845,10 +801,10 @@ public class TestUnionAll extends BaseTestQuery {
 
   @Test // see DRILL-3130
   public void testProjectDownOverUnionAllImplicitCasting() throws Exception {
-    String root = FileUtils.getResourceAsFile("/store/text/data/nations.csv").toURI().toString();
+    String root = "/store/text/data/nations.csv";
     String query = String.format("select 2 * n_nationkey as col from \n" +
         "(select n_nationkey, n_name, n_comment from cp.`tpch/nation.parquet` \n" +
-        "union all select columns[0], columns[1], columns[2] from dfs.`%s`) \n" +
+        "union all select columns[0], columns[1], columns[2] from cp.`%s`) \n" +
         "order by col limit 10", root);
 
     // Validate the plan
@@ -1043,11 +999,11 @@ public class TestUnionAll extends BaseTestQuery {
   @Test // DRILL-4147 // base case
   @Category(UnlikelyTest.class)
   public void testDrill4147_1() throws Exception {
-    final String l = FileUtils.getResourceAsFile("/multilevel/parquet/1994").toURI().toString();
-    final String r = FileUtils.getResourceAsFile("/multilevel/parquet/1995").toURI().toString();
+    final String l = "/multilevel/parquet/1994";
+    final String r = "/multilevel/parquet/1995";
 
-    final String query = String.format("SELECT o_custkey FROM dfs_test.`%s` \n" +
-        "Union All SELECT o_custkey FROM dfs_test.`%s`", l, r);
+    final String query = String.format("SELECT o_custkey FROM dfs.`%s` \n" +
+        "Union All SELECT o_custkey FROM dfs.`%s`", l, r);
 
     // Validate the plan
     final String[] expectedPlan = {"UnionExchange.*\n",
@@ -1073,12 +1029,12 @@ public class TestUnionAll extends BaseTestQuery {
 
   @Test // DRILL-4147  // group-by on top of union-all
   public void testDrill4147_2() throws Exception {
-    final String l = FileUtils.getResourceAsFile("/multilevel/parquet/1994").toURI().toString();
-    final String r = FileUtils.getResourceAsFile("/multilevel/parquet/1995").toURI().toString();
+    final String l = "/multilevel/parquet/1994";
+    final String r = "/multilevel/parquet/1995";
 
     final String query = String.format("Select o_custkey, count(*) as cnt from \n" +
-        " (SELECT o_custkey FROM dfs_test.`%s` \n" +
-        "Union All SELECT o_custkey FROM dfs_test.`%s`) \n" +
+        " (SELECT o_custkey FROM dfs.`%s` \n" +
+        "Union All SELECT o_custkey FROM dfs.`%s`) \n" +
         "group by o_custkey", l, r);
 
     // Validate the plan
@@ -1104,12 +1060,12 @@ public class TestUnionAll extends BaseTestQuery {
 
   @Test // DRILL-4147 // union-all above a hash join
   public void testDrill4147_3() throws Exception {
-    final String l = FileUtils.getResourceAsFile("/multilevel/parquet/1994").toURI().toString();
-    final String r = FileUtils.getResourceAsFile("/multilevel/parquet/1995").toURI().toString();
+    final String l = "/multilevel/parquet/1994";
+    final String r = "/multilevel/parquet/1995";
 
     final String query = String.format("SELECT o_custkey FROM \n" +
-        " (select o1.o_custkey from dfs_test.`%s` o1 inner join dfs_test.`%s` o2 on o1.o_orderkey = o2.o_custkey) \n" +
-        " Union All SELECT o_custkey FROM dfs_test.`%s` where o_custkey > 10", l, r, l);
+        " (select o1.o_custkey from dfs.`%s` o1 inner join dfs.`%s` o2 on o1.o_orderkey = o2.o_custkey) \n" +
+        " Union All SELECT o_custkey FROM dfs.`%s` where o_custkey > 10", l, r, l);
 
     // Validate the plan
     final String[] expectedPlan = {"(?s)UnionExchange.*UnionAll.*HashJoin.*"};
@@ -1135,12 +1091,12 @@ public class TestUnionAll extends BaseTestQuery {
   @Test // DRILL-4833  // limit 1 is on RHS of union-all
   @Category(UnlikelyTest.class)
   public void testDrill4833_1() throws Exception {
-    final String l = FileUtils.getResourceAsFile("/multilevel/parquet/1994").toURI().toString();
-    final String r = FileUtils.getResourceAsFile("/multilevel/parquet/1995").toURI().toString();
+    final String l = "/multilevel/parquet/1994";
+    final String r = "/multilevel/parquet/1995";
 
     final String query = String.format("SELECT o_custkey FROM \n" +
-        " ((select o1.o_custkey from dfs_test.`%s` o1 inner join dfs_test.`%s` o2 on o1.o_orderkey = o2.o_custkey) \n" +
-        " Union All (SELECT o_custkey FROM dfs_test.`%s` limit 1))", l, r, l);
+        " ((select o1.o_custkey from dfs.`%s` o1 inner join dfs.`%s` o2 on o1.o_orderkey = o2.o_custkey) \n" +
+        " Union All (SELECT o_custkey FROM dfs.`%s` limit 1))", l, r, l);
 
     // Validate the plan
     final String[] expectedPlan = {"(?s)UnionExchange.*UnionAll.*HashJoin.*"};
@@ -1169,13 +1125,13 @@ public class TestUnionAll extends BaseTestQuery {
   @Test // DRILL-4833  // limit 1 is on LHS of union-all
   @Category(UnlikelyTest.class)
   public void testDrill4833_2() throws Exception {
-    final String l = FileUtils.getResourceAsFile("/multilevel/parquet/1994").toURI().toString();
-    final String r = FileUtils.getResourceAsFile("/multilevel/parquet/1995").toURI().toString();
+    final String l = "/multilevel/parquet/1994";
+    final String r = "/multilevel/parquet/1995";
 
     final String query = String.format("SELECT o_custkey FROM \n" +
-        " ((SELECT o_custkey FROM dfs_test.`%s` limit 1) \n" +
+        " ((SELECT o_custkey FROM dfs.`%s` limit 1) \n" +
         " union all \n" +
-        " (select o1.o_custkey from dfs_test.`%s` o1 inner join dfs_test.`%s` o2 on o1.o_orderkey = o2.o_custkey))", l, r, l);
+        " (select o1.o_custkey from dfs.`%s` o1 inner join dfs.`%s` o2 on o1.o_orderkey = o2.o_custkey))", l, r, l);
 
     // Validate the plan
     final String[] expectedPlan = {"(?s)UnionExchange.*UnionAll.*HashJoin.*"};
@@ -1215,39 +1171,30 @@ public class TestUnionAll extends BaseTestQuery {
   @Test // DRILL-4264
   @Category(UnlikelyTest.class)
   public void testFieldWithDots() throws Exception {
-    File directory = new File(BaseTestQuery.getTempDir("json/input"));
-    try {
-      directory.mkdirs();
-      String fileName = "table.json";
-      try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(directory, fileName)))) {
-        writer.write("{\"rk.q\": \"a\", \"m\": {\"a.b\":\"1\", \"a\":{\"b\":\"2\"}, \"c\":\"3\"}}");
-      }
-
-      String query = String.format("select * from (" +
-                                              "(select t.m.`a.b` as a,\n" +
-                                                      "t.m.a.b as b,\n" +
-                                                      "t.m['a.b'] as c,\n" +
-                                                      "t.rk.q as d,\n" +
-                                                      "t.`rk.q` as e\n" +
-                                              "from dfs_test.`%1$s/%2$s` t)\n" +
-                                            "union all\n" +
-                                              "(select t.m.`a.b` as a,\n" +
-                                                      "t.m.a.b as b,\n" +
-                                                      "t.m['a.b'] as c,\n" +
-                                                      "t.rk.q as d,\n" +
-                                                      "t.`rk.q` as e\n" +
-                                              "from dfs_test.`%1$s/%2$s` t))",
-                                  directory.toPath().toString(), fileName);
-      testBuilder()
-        .sqlQuery(query)
-        .unOrdered()
-        .baselineColumns("a", "b", "c", "d", "e")
-        .baselineValues("1", "2", "1", null, "a")
-        .baselineValues("1", "2", "1", null, "a")
-        .go();
-
-    } finally {
-      org.apache.commons.io.FileUtils.deleteQuietly(directory);
+    String fileName = "table.json";
+    try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), fileName)))) {
+      writer.write("{\"rk.q\": \"a\", \"m\": {\"a.b\":\"1\", \"a\":{\"b\":\"2\"}, \"c\":\"3\"}}");
     }
+
+    testBuilder()
+      .sqlQuery("select * from (" +
+        "(select t.m.`a.b` as a,\n" +
+        "t.m.a.b as b,\n" +
+        "t.m['a.b'] as c,\n" +
+        "t.rk.q as d,\n" +
+        "t.`rk.q` as e\n" +
+        "from dfs.`%1$s` t)\n" +
+        "union all\n" +
+        "(select t.m.`a.b` as a,\n" +
+        "t.m.a.b as b,\n" +
+        "t.m['a.b'] as c,\n" +
+        "t.rk.q as d,\n" +
+        "t.`rk.q` as e\n" +
+        "from dfs.`%1$s` t))", fileName)
+      .unOrdered()
+      .baselineColumns("a", "b", "c", "d", "e")
+      .baselineValues("1", "2", "1", null, "a")
+      .baselineValues("1", "2", "1", null, "a")
+      .go();
   }
 }
\ No newline at end of file


Mime
View raw message