drill-commits mailing list archives

From prog...@apache.org
Subject [12/22] drill git commit: DRILL-5783, DRILL-5841, DRILL-5894: Rationalize test temp directories
Date Wed, 15 Nov 2017 01:46:58 GMT
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestEarlyLimit0Optimization.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestEarlyLimit0Optimization.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestEarlyLimit0Optimization.java
index f8b0d47..8bbe914 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestEarlyLimit0Optimization.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestEarlyLimit0Optimization.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.physical.impl.limit;
 
 import com.google.common.collect.Lists;
 import org.apache.commons.lang3.tuple.Pair;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.PlanTestBase;
 import org.apache.drill.categories.PlannerTest;
 import org.apache.drill.common.expression.SchemaPath;
@@ -48,8 +48,8 @@ public class TestEarlyLimit0Optimization extends BaseTestQuery {
 
   @BeforeClass
   public static void createView() throws Exception {
-    test("USE dfs_test.tmp");
-    test(String.format("CREATE OR REPLACE VIEW %s AS SELECT " +
+    test("USE dfs.tmp");
+    test("CREATE OR REPLACE VIEW %s AS SELECT " +
         "CAST(employee_id AS INT) AS employee_id, " +
         "CAST(full_name AS VARCHAR(25)) AS full_name, " +
         "CAST(position_id AS INTEGER) AS position_id, " +
@@ -63,26 +63,22 @@ public class TestEarlyLimit0Optimization extends BaseTestQuery {
         "CAST(gender AS CHAR(1)) AS gender " +
         "FROM cp.`employee.json` " +
         "ORDER BY employee_id " +
-        "LIMIT 1;", viewName));
-    // { "employee_id":1,"full_name":"Sheri Nowmer","first_name":"Sheri","last_name":"Nowmer","position_id":1,
-    // "position_title":"President","store_id":0,"department_id":1,"birth_date":"1961-08-26",
-    // "hire_date":"1994-12-01 00:00:00.0","end_date":null,"salary":80000.0000,"supervisor_id":0,
-    // "education_level":"Graduate Degree","marital_status":"S","gender":"F","management_role":"Senior Management" }
+        "LIMIT 1", viewName);
   }
 
   @AfterClass
   public static void tearDownView() throws Exception {
-    test("DROP VIEW " + viewName + ";");
+    test("DROP VIEW " + viewName);
   }
 
   @Before
   public void setOption() throws Exception {
-    test("SET `%s` = true;", ExecConstants.EARLY_LIMIT0_OPT_KEY);
+    test("SET `%s` = true", ExecConstants.EARLY_LIMIT0_OPT_KEY);
   }
 
   @After
   public void resetOption() throws Exception {
-    test("RESET `%s`;", ExecConstants.EARLY_LIMIT0_OPT_KEY);
+    test("RESET `%s`", ExecConstants.EARLY_LIMIT0_OPT_KEY);
   }
 
   // -------------------- SIMPLE QUERIES --------------------
@@ -90,7 +86,7 @@ public class TestEarlyLimit0Optimization extends BaseTestQuery {
   @Test
   public void infoSchema() throws Exception {
     testBuilder()
-        .sqlQuery(String.format("DESCRIBE %s", viewName))
+        .sqlQuery("DESCRIBE %s", viewName)
         .unOrdered()
         .baselineColumns("COLUMN_NAME", "DATA_TYPE", "IS_NULLABLE")
         .baselineValues("employee_id", "INTEGER", "YES")
@@ -110,7 +106,7 @@ public class TestEarlyLimit0Optimization extends BaseTestQuery {
   @Test
   public void simpleSelect() throws Exception {
     testBuilder()
-        .sqlQuery(String.format("SELECT * FROM %s", viewName))
+        .sqlQuery("SELECT * FROM %s", viewName)
         .ordered()
         .baselineColumns("employee_id", "full_name", "position_id", "department_id", "birth_date", "hire_date",
             "salary", "fsalary", "single", "education_level", "gender")
@@ -303,7 +299,7 @@ public class TestEarlyLimit0Optimization extends BaseTestQuery {
         .ordered()
         .baselineColumns("s", "p", "a", "c")
         .baselineValues(null, 0.0D, 1.0D, 1L)
-        .go();
+         .go();
 
     testBuilder()
         .sqlQuery(wrapLimit0(query))
@@ -339,10 +335,10 @@ public class TestEarlyLimit0Optimization extends BaseTestQuery {
 
   @Test
   public void nullableSumAndCount() throws Exception {
-    final String query = "SELECT " +
+    final String query = String.format("SELECT " +
         "COUNT(position_id) AS c, " +
         "SUM(CAST((CASE WHEN position_id = 1 THEN NULL ELSE position_id END) AS INT)) AS p " +
-        "FROM " + viewName;
+        "FROM %s", viewName);
 
     @SuppressWarnings("unchecked")
     final List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema = Lists.newArrayList(
@@ -412,13 +408,13 @@ public class TestEarlyLimit0Optimization extends BaseTestQuery {
 
   @Test
   public void sumsAndCounts1() throws Exception {
-    final String query = "SELECT " +
+    final String query = String.format("SELECT " +
         "COUNT(*) as cs, " +
         "COUNT(1) as c1, " +
         "COUNT(employee_id) as cc, " +
         "SUM(1) as s1," +
         "department_id " +
-        " FROM " + viewName + " GROUP BY department_id";
+        " FROM %s GROUP BY department_id", viewName);
 
     @SuppressWarnings("unchecked")
     final List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema = Lists.newArrayList(
@@ -507,8 +503,8 @@ public class TestEarlyLimit0Optimization extends BaseTestQuery {
 
   @Test
   public void cast() throws Exception {
-    final String query = "SELECT CAST(fsalary AS DOUBLE) AS d," +
-        "CAST(employee_id AS BIGINT) AS e FROM " + viewName;
+    final String query = String.format("SELECT CAST(fsalary AS DOUBLE) AS d," +
+        "CAST(employee_id AS BIGINT) AS e FROM %s", viewName);
 
     @SuppressWarnings("unchecked")
     final List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema = Lists.newArrayList(

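Two conventions recur throughout this commit: the temp workspace dfs_test.tmp becomes dfs.tmp, and explicit String.format calls are dropped wherever the string goes straight into test() or testBuilder().sqlQuery(), since both accept printf-style arguments (String.format stays only where the formatted string is reused, as in nullableSumAndCount above). A minimal sketch of the resulting style; the view name and column set are illustrative:

import org.apache.drill.test.BaseTestQuery;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

public class ExampleViewTest extends BaseTestQuery {
  // Hypothetical view name, used only for illustration.
  private static final String viewName = "example_view";

  @BeforeClass
  public static void createView() throws Exception {
    test("USE dfs.tmp");
    // Format arguments go straight to test(); no String.format, no trailing ';'.
    test("CREATE OR REPLACE VIEW %s AS SELECT " +
        "CAST(employee_id AS INT) AS employee_id " +
        "FROM cp.`employee.json` ORDER BY employee_id LIMIT 1", viewName);
  }

  @AfterClass
  public static void tearDownView() throws Exception {
    test("DROP VIEW " + viewName);
  }

  @Test
  public void describeView() throws Exception {
    testBuilder()
        .sqlQuery("DESCRIBE %s", viewName)   // sqlQuery() formats too
        .unOrdered()
        .baselineColumns("COLUMN_NAME", "DATA_TYPE", "IS_NULLABLE")
        .baselineValues("employee_id", "INTEGER", "YES")
        .go();
  }
}
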
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestLimitWithExchanges.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestLimitWithExchanges.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestLimitWithExchanges.java
index b49a12e..7a2ee07 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestLimitWithExchanges.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestLimitWithExchanges.java
@@ -17,21 +17,27 @@
  */
 package org.apache.drill.exec.physical.impl.limit;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.PlanTestBase;
-import org.apache.drill.common.util.TestTools;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.test.OperatorFixture;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.nio.file.Paths;
+
 import static org.junit.Assert.assertEquals;
 
 @Category(OperatorTest.class)
 public class TestLimitWithExchanges extends BaseTestQuery {
-  final String WORKING_PATH = TestTools.getWorkingPath();
-  final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources";
+  @BeforeClass
+  public static void setupTestFiles() {
+    dirTestWatcher.copyResourceToRoot(Paths.get("multilevel", "json"));
+    dirTestWatcher.copyResourceToRoot(Paths.get("tpchmulti", "region"));
+    dirTestWatcher.copyResourceToRoot(Paths.get("tpchmulti", "nation"));
+  }
 
   @Test
   public void testLimitWithExchanges() throws Exception{
@@ -46,30 +52,28 @@ public class TestLimitWithExchanges extends BaseTestQuery {
       final String[] excludedPlan = {};
 
       // case 1. single table query.
-      final String sql = String.format("select * from dfs_test.`%s/multilevel/json` limit 1 offset 2", TEST_RES_PATH);
+      final String sql = "select * from dfs.`multilevel/json` limit 1 offset 2";
       final String[] expectedPlan ={"(?s)Limit\\(offset=\\[2\\], fetch=\\[1\\].*UnionExchange.*Limit\\(fetch=\\[3\\]\\).*Scan"};
       testLimitHelper(sql, expectedPlan, excludedPlan, 1);
 
-      final String sql2 = String.format("select * from dfs_test.`%s/multilevel/json` limit 1 offset 0", TEST_RES_PATH);
+      final String sql2 = "select * from dfs.`multilevel/json` limit 1 offset 0";
       final String[] expectedPlan2 = {"(?s)Limit\\(offset=\\[0\\], fetch=\\[1\\].*UnionExchange.*Limit\\(fetch=\\[1\\]\\).*Scan"};
       testLimitHelper(sql2, expectedPlan2, excludedPlan, 1);
 
-      final String sql3 = String.format("select * from dfs_test.`%s/multilevel/json` limit 1", TEST_RES_PATH);
+      final String sql3 = "select * from dfs.`multilevel/json` limit 1";
       final String[] expectedPlan3 = {"(?s)Limit\\(fetch=\\[1\\].*UnionExchange.*Limit\\(fetch=\\[1\\]\\).*Scan"};
       testLimitHelper(sql3, expectedPlan3, excludedPlan, 1);
 
       // case 2: join query.
-      final String sql4 = String.format(
-          "select * from dfs_test.`%s/tpchmulti/region` r,  dfs_test.`%s/tpchmulti/nation` n " +
-          "where r.r_regionkey = n.n_regionkey limit 1 offset 2", TEST_RES_PATH, TEST_RES_PATH );
+      final String sql4 = "select * from dfs.`tpchmulti/region` r, dfs.`tpchmulti/nation` n " +
+          "where r.r_regionkey = n.n_regionkey limit 1 offset 2";
 
       final String[] expectedPlan4 = {"(?s)Limit\\(offset=\\[2\\], fetch=\\[1\\].*UnionExchange.*Limit\\(fetch=\\[3\\]\\).*Join"};
 
       testLimitHelper(sql4, expectedPlan4, excludedPlan, 1);
 
-      final String sql5 = String.format(
-          "select * from dfs_test.`%s/tpchmulti/region` r,  dfs_test.`%s/tpchmulti/nation` n " +
-              "where r.r_regionkey = n.n_regionkey limit 1", TEST_RES_PATH, TEST_RES_PATH );
+      final String sql5 = "select * from dfs.`tpchmulti/region` r,  dfs.`tpchmulti/nation` n " +
+              "where r.r_regionkey = n.n_regionkey limit 1";
 
       final String[] expectedPlan5 = {"(?s)Limit\\(fetch=\\[1\\].*UnionExchange.*Limit\\(fetch=\\[1\\]\\).*Join"};
       testLimitHelper(sql5, expectedPlan5, excludedPlan, 1);
@@ -86,11 +90,8 @@ public class TestLimitWithExchanges extends BaseTestQuery {
       final String[] expectedPlan ={};
 
       // case 1. Only "offset", but no "limit" : should not push "limit" down.
-      final String sql = String.format("select * from dfs_test.`%s/tpchmulti/region` offset 2", TEST_RES_PATH);
-      final String[] excludedPlan = {"(?s)Limit\\(offset=\\[2\\].*UnionExchange.*Limit.*Scan"};
-
       // case 2. "limit" is higher than # of rowcount in table : should not push "limit" down.
-      final String sql2 = String.format("select * from dfs_test.`%s/tpchmulti/region` limit 100", TEST_RES_PATH);
+      final String sql2 = "select * from dfs.`tpchmulti/region` limit 100";
       final String[] excludedPlan2 = {"(?s)Limit\\(fetch=\\[100\\].*UnionExchange.*Limit.*Scan"};
 
       testLimitHelper(sql2, expectedPlan, excludedPlan2, 5);
@@ -107,16 +108,16 @@ public class TestLimitWithExchanges extends BaseTestQuery {
       // nation has 3 files, total 25 rows.
       // Given slice_target = 5, if # of rows to fetch is < 5 : do NOT insert Exchange, and the query should run in single fragment.
       //                         if # of row to fetch is >= 5:  do insert exchange, and query should run in multiple fragments.
-      final String sql = String.format("select * from dfs_test.`%s/tpchmulti/nation` limit 2", TEST_RES_PATH);  // Test Limit_On_Scan rule.
-      final String sql2 = String.format("select n_nationkey + 1000 from dfs_test.`%s/tpchmulti/nation` limit 2", TEST_RES_PATH); // Test Limit_On_Project rule.
+      final String sql = "select * from dfs.`tpchmulti/nation` limit 2";  // Test Limit_On_Scan rule.
+      final String sql2 = "select n_nationkey + 1000 from dfs.`tpchmulti/nation` limit 2"; // Test Limit_On_Project rule.
       final String [] expectedPlan = {};
       final String [] excludedPlan = {"UnionExchange"};
 
       testLimitHelper(sql, expectedPlan, excludedPlan, 2);
       testLimitHelper(sql2, expectedPlan, excludedPlan, 2);
 
-      final String sql3 = String.format("select * from dfs_test.`%s/tpchmulti/nation` limit 10", TEST_RES_PATH); // Test Limit_On_Scan rule.
-      final String sql4 = String.format("select n_nationkey + 1000 from dfs_test.`%s/tpchmulti/nation` limit 10", TEST_RES_PATH); // Test Limit_On_Project rule.
+      final String sql3 = "select * from dfs.`tpchmulti/nation` limit 10"; // Test Limit_On_Scan rule.
+      final String sql4 = "select n_nationkey + 1000 from dfs.`tpchmulti/nation` limit 10"; // Test Limit_On_Project rule.
 
       final String [] expectedPlan2 = {"UnionExchange"};
       final String [] excludedPlan2 = {};
@@ -130,7 +131,7 @@ public class TestLimitWithExchanges extends BaseTestQuery {
 
   @Test
   public void TestLimitAllOnParquet() throws Exception {
-    final String query = String.format("select t.n_nationkey from cp.`tpch/nation.parquet` t limit all offset 5", TEST_RES_PATH);
+    final String query = "select t.n_nationkey from cp.`tpch/nation.parquet` t limit all offset 5";
     final String [] expectedPlan = {};
     final String [] excludedPlan = {"UnionExchange"};
 

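The @BeforeClass block above is the core of the temp-directory rationalization: instead of interpolating an absolute TEST_RES_PATH into dfs_test queries, each test class copies the resources it needs into the watcher-managed root once, and queries address them with workspace-relative dfs paths. A sketch under those assumptions; the resource directory comes from the hunk above, the test body is illustrative:

import java.nio.file.Paths;

import org.apache.drill.test.BaseTestQuery;
import org.junit.BeforeClass;
import org.junit.Test;

public class ExampleExchangeTest extends BaseTestQuery {
  @BeforeClass
  public static void setupTestFiles() {
    // Copies src/test/resources/tpchmulti/nation into the dfs root
    // that dirTestWatcher manages for this test class.
    dirTestWatcher.copyResourceToRoot(Paths.get("tpchmulti", "nation"));
  }

  @Test
  public void limitOnScan() throws Exception {
    // Relative path: the dfs workspace is rooted at the watcher directory.
    test("select * from dfs.`tpchmulti/nation` limit 2");
  }
}
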
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestSimpleLimit.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestSimpleLimit.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestSimpleLimit.java
index 042d38e..2032afc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestSimpleLimit.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/limit/TestSimpleLimit.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.assertTrue;
 
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -83,7 +83,7 @@ public class TestSimpleLimit extends ExecTest {
 
   private void verifyLimitCount(DrillbitContext bitContext, UserClientConnection connection, String testPlan, int expectedCount) throws Throwable {
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/limit/" + testPlan), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/limit/" + testPlan), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
@@ -103,7 +103,7 @@ public class TestSimpleLimit extends ExecTest {
 
   private void verifySum(DrillbitContext bitContext, UserClientConnection connection, String testPlan, int expectedCount, long expectedSum) throws Throwable {
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/limit/" + testPlan), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/limit/" + testPlan), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));

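The FileUtils -> DrillFileUtils change here (and in the files below) is a pure rename of the helper class; the call shape is unchanged. A one-method sketch, assuming the Guava Files/Charsets imports these tests already use; the resource prefix is taken from the hunk above:

import java.io.IOException;

import com.google.common.base.Charsets;
import com.google.common.io.Files;
import org.apache.drill.common.util.DrillFileUtils;

public class PlanResourceReader {
  // Reads a physical-plan JSON resource, exactly as the tests above do.
  public static String read(String testPlan) throws IOException {
    return Files.toString(
        DrillFileUtils.getResourceAsFile("/limit/" + testPlan), Charsets.UTF_8);
  }
}
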
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java
index f0aa1b7..46afb42 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java
@@ -24,7 +24,7 @@ import static org.junit.Assert.assertTrue;
 import java.util.List;
 
 import org.apache.drill.categories.OperatorTest;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.client.DrillClient;
 import org.apache.drill.exec.pop.PopUnitTestBase;
 import org.apache.drill.exec.proto.UserBitShared.QueryData;
@@ -57,7 +57,7 @@ public class TestMergingReceiver extends PopUnitTestBase {
       bit2.run();
       client.connect();
       final List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-        Files.toString(FileUtils.getResourceAsFile("/mergerecv/merging_receiver.json"),
+        Files.toString(DrillFileUtils.getResourceAsFile("/mergerecv/merging_receiver.json"),
           Charsets.UTF_8));
       int count = 0;
       final RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
@@ -88,7 +88,7 @@ public class TestMergingReceiver extends PopUnitTestBase {
       client.connect();
       final List<QueryDataBatch> results =
           client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-              Files.toString(FileUtils.getResourceAsFile("/mergerecv/multiple_providers.json"),
+              Files.toString(DrillFileUtils.getResourceAsFile("/mergerecv/multiple_providers.json"),
                   Charsets.UTF_8));
       int count = 0;
       final RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
@@ -137,7 +137,7 @@ public class TestMergingReceiver extends PopUnitTestBase {
       client.connect();
       final List<QueryDataBatch> results =
           client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-              Files.toString(FileUtils.getResourceAsFile("/mergerecv/empty_batch.json"),
+              Files.toString(DrillFileUtils.getResourceAsFile("/mergerecv/empty_batch.json"),
                   Charsets.UTF_8));
       int count = 0;
       final RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
@@ -167,7 +167,7 @@ public class TestMergingReceiver extends PopUnitTestBase {
       client.connect();
       final List<QueryDataBatch> results =
           client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-              Files.toString(FileUtils.getResourceAsFile("/mergerecv/empty_batch_noschema.json"),
+              Files.toString(DrillFileUtils.getResourceAsFile("/mergerecv/empty_batch_noschema.json"),
                   Charsets.UTF_8));
       int count = 0;
       final RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());
@@ -197,7 +197,7 @@ public class TestMergingReceiver extends PopUnitTestBase {
       client.connect();
       final List<QueryDataBatch> results =
           client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-              Files.toString(FileUtils.getResourceAsFile("/mergerecv/multiple_providers_empty_batches.json"),
+              Files.toString(DrillFileUtils.getResourceAsFile("/mergerecv/multiple_providers_empty_batches.json"),
                   Charsets.UTF_8));
       int count = 0;
       final RecordBatchLoader batchLoader = new RecordBatchLoader(client.getAllocator());

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/orderedpartitioner/TestOrderedPartitionExchange.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/orderedpartitioner/TestOrderedPartitionExchange.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/orderedpartitioner/TestOrderedPartitionExchange.java
index 4b00821..5a21ef1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/orderedpartitioner/TestOrderedPartitionExchange.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/orderedpartitioner/TestOrderedPartitionExchange.java
@@ -28,7 +28,7 @@ import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.expression.ExpressionPosition;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.scanner.ClassPathScanner;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.client.DrillClient;
 import org.apache.drill.exec.pop.PopUnitTestBase;
 import org.apache.drill.exec.record.RecordBatchLoader;
@@ -77,7 +77,7 @@ public class TestOrderedPartitionExchange extends PopUnitTestBase {
       bit2.run();
       client.connect();
       List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-          Files.toString(FileUtils.getResourceAsFile("/sender/ordered_exchange.json"),
+          Files.toString(DrillFileUtils.getResourceAsFile("/sender/ordered_exchange.json"),
               Charsets.UTF_8));
       int count = 0;
       List<Integer> partitionRecordCounts = Lists.newArrayList();

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/partitionsender/TestPartitionSender.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/partitionsender/TestPartitionSender.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/partitionsender/TestPartitionSender.java
index e35fba7..544ed49 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/partitionsender/TestPartitionSender.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/partitionsender/TestPartitionSender.java
@@ -25,6 +25,7 @@ import static org.junit.Assert.fail;
 import java.io.File;
 import java.io.IOException;
 import java.io.PrintWriter;
+import java.nio.file.Paths;
 import java.util.List;
 import java.util.Random;
 
@@ -67,11 +68,9 @@ import org.apache.drill.exec.server.options.OptionValue.AccessibleScopes;
 import org.apache.drill.exec.server.options.OptionValue.OptionScope;
 import org.apache.drill.exec.util.Utilities;
 import org.apache.drill.exec.work.QueryWorkUnit;
-import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TemporaryFolder;
 import org.mockito.Mockito;
 
 import com.google.common.collect.Lists;
@@ -94,31 +93,22 @@ public class TestPartitionSender extends PlanTestBase {
       .withCredentials(UserBitShared.UserCredentials.newBuilder().setUserName("foo").build())
       .build();
 
-
-  public static TemporaryFolder testTempFolder = new TemporaryFolder();
-
-  private final static int NUM_DEPTS = 40;
-  private final static int NUM_EMPLOYEES = 1000;
-  private final static int DRILLBITS_COUNT = 3;
-
-  private static String empTableLocation;
+  private static final int NUM_DEPTS = 40;
+  private static final int NUM_EMPLOYEES = 1000;
+  private static final int DRILLBITS_COUNT = 3;
+  private static final String TABLE = "table";
 
   private static String groupByQuery;
 
   @BeforeClass
-  public static void setupTempFolder() throws IOException {
-    testTempFolder.create();
-  }
-
-  @BeforeClass
   public static void generateTestDataAndQueries() throws Exception {
     // Table consists of two columns "emp_id", "emp_name" and "dept_id"
-    empTableLocation = testTempFolder.newFolder().getAbsolutePath();
+    final File empTableLocation = dirTestWatcher.makeRootSubDir(Paths.get(TABLE));
 
     // Write 100 records for each new file
     final int empNumRecsPerFile = 100;
     for(int fileIndex=0; fileIndex<NUM_EMPLOYEES/empNumRecsPerFile; fileIndex++) {
-      File file = new File(empTableLocation + File.separator + fileIndex + ".json");
+      File file = new File(empTableLocation, fileIndex + ".json");
       PrintWriter printWriter = new PrintWriter(file);
       for (int recordIndex = fileIndex*empNumRecsPerFile; recordIndex < (fileIndex+1)*empNumRecsPerFile; recordIndex++) {
         String record = String.format("{ \"emp_id\" : %d, \"emp_name\" : \"Employee %d\", \"dept_id\" : %d }",
@@ -129,12 +119,7 @@ public class TestPartitionSender extends PlanTestBase {
     }
 
     // Initialize test queries
-    groupByQuery = String.format("SELECT dept_id, count(*) as numEmployees FROM dfs.`%s` GROUP BY dept_id", empTableLocation);
-  }
-
-  @AfterClass
-  public static void cleanupTempFolder() throws IOException {
-    testTempFolder.delete();
+    groupByQuery = String.format("SELECT dept_id, count(*) as numEmployees FROM dfs.`%s` GROUP BY dept_id", TABLE);
   }
 
   @Test

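Here the commit replaces a hand-managed JUnit TemporaryFolder with a subdirectory of the watcher root, which removes both the @AfterClass cleanup and the absolute path in the query. A sketch under the same assumptions; table name and record contents are illustrative:

import java.io.File;
import java.io.PrintWriter;
import java.nio.file.Paths;

import org.apache.drill.PlanTestBase;
import org.junit.BeforeClass;

public class ExampleGeneratedTableTest extends PlanTestBase {
  private static final String TABLE = "example_table";

  @BeforeClass
  public static void generateTestData() throws Exception {
    // makeRootSubDir() returns a File inside the watcher-managed dfs root;
    // the watcher cleans it up after the class, so no @AfterClass is needed.
    File tableDir = dirTestWatcher.makeRootSubDir(Paths.get(TABLE));
    try (PrintWriter writer = new PrintWriter(new File(tableDir, "0.json"))) {
      writer.println("{ \"emp_id\" : 1, \"dept_id\" : 1 }");
    }
    // Queries then reference the bare table name, e.g.
    // String.format("SELECT count(*) FROM dfs.`%s`", TABLE).
  }
}
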
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/project/TestSimpleProjection.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/project/TestSimpleProjection.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/project/TestSimpleProjection.java
index f62d133..022d153 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/project/TestSimpleProjection.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/project/TestSimpleProjection.java
@@ -24,7 +24,7 @@ import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.expression.ExpressionPosition;
 import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -57,7 +57,7 @@ public class TestSimpleProjection extends ExecTest {
     mockDrillbitContext(bitContext);
 
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/project/test1.json"), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/project/test1.json"), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSimpleSort.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSimpleSort.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSimpleSort.java
index 77b03e3..98c997f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSimpleSort.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSimpleSort.java
@@ -24,7 +24,7 @@ import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.expression.ExpressionPosition;
 import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -59,7 +59,7 @@ public class TestSimpleSort extends ExecTest {
     mockDrillbitContext(bitContext);
 
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/sort/one_key_sort.json"), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/sort/one_key_sort.json"), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));
@@ -97,7 +97,7 @@ public class TestSimpleSort extends ExecTest {
     mockDrillbitContext(bitContext);
 
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/sort/two_key_sort.json"), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/sort/two_key_sort.json"), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSort.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSort.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSort.java
index 19f4f61..7a62706 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSort.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSort.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.physical.impl.sort;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.util.JsonStringArrayList;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/svremover/TestSVRemover.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/svremover/TestSVRemover.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/svremover/TestSVRemover.java
index 15b13d8..50b18ea 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/svremover/TestSVRemover.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/svremover/TestSVRemover.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.physical.impl.svremover;
 
 import static org.junit.Assert.assertEquals;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.junit.Test;
 
 public class TestSVRemover extends BaseTestQuery {

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/trace/TestTraceMultiRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/trace/TestTraceMultiRecordBatch.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/trace/TestTraceMultiRecordBatch.java
index b00e143..51d1e7b 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/trace/TestTraceMultiRecordBatch.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/trace/TestTraceMultiRecordBatch.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertTrue;
 
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -61,7 +61,7 @@ public class TestTraceMultiRecordBatch extends ExecTest {
     mockDrillbitContext(bitContext);
 
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/trace/multi_record_batch_trace.json"), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/trace/multi_record_batch_trace.json"), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/trace/TestTraceOutputDump.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/trace/TestTraceOutputDump.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/trace/TestTraceOutputDump.java
index 3e08ed1..780c83d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/trace/TestTraceOutputDump.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/trace/TestTraceOutputDump.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertTrue;
 
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.cache.VectorAccessibleSerializable;
@@ -75,7 +75,7 @@ public class TestTraceOutputDump extends ExecTest {
     mockDrillbitContext(bitContext);
 
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/trace/simple_trace.json"), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/trace/simple_trace.json"), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/union/TestSimpleUnion.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/union/TestSimpleUnion.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/union/TestSimpleUnion.java
index 9002936..4f49355 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/union/TestSimpleUnion.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/union/TestSimpleUnion.java
@@ -22,7 +22,7 @@ import static org.junit.Assert.assertTrue;
 
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.ops.FragmentContext;
@@ -54,7 +54,7 @@ public class TestSimpleUnion extends ExecTest {
     mockDrillbitContext(bitContext);
 
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
-    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/union/test1.json"), Charsets.UTF_8));
+    final PhysicalPlan plan = reader.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/union/test1.json"), Charsets.UTF_8));
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
     final FragmentContext context = new FragmentContext(bitContext, PlanFragment.getDefaultInstance(), connection, registry);
     final SimpleRootExec exec = new SimpleRootExec(ImplCreator.getExec(context, (FragmentRoot) plan.getSortedOperators(false).iterator().next()));

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestValidationOptions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestValidationOptions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestValidationOptions.java
index 4b3cbff..2fd9d6f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestValidationOptions.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/validate/TestValidationOptions.java
@@ -21,14 +21,17 @@ import static org.junit.Assert.assertFalse;
 
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.store.easy.text.compliant.CompliantTextRecordReader;
+import org.apache.drill.test.BaseDirTestWatcher;
 import org.apache.drill.test.ClientFixture;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.DrillTest;
 import org.apache.drill.test.ClusterFixtureBuilder;
 import org.apache.drill.test.LogFixture;
+import org.apache.drill.test.DirTestWatcher;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
+import org.junit.Rule;
 import org.junit.Test;
 
 import ch.qos.logback.classic.Level;
@@ -38,6 +41,9 @@ public class TestValidationOptions extends DrillTest {
 
   protected static LogFixture logFixture;
 
+  @Rule
+  public final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();
+
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
     logFixture = LogFixture.builder()
@@ -59,13 +65,12 @@ public class TestValidationOptions extends DrillTest {
 
   @Test
   public void testOptions() throws Exception {
-    ClusterFixtureBuilder builder = ClusterFixture.builder()
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
         .maxParallelization(1)
         .configProperty(ExecConstants.ENABLE_ITERATOR_VALIDATION, false)
         .configProperty(ExecConstants.ENABLE_VECTOR_VALIDATION, false)
         .sessionOption(ExecConstants.ENABLE_ITERATOR_VALIDATION_OPTION, true)
-        .sessionOption(ExecConstants.ENABLE_VECTOR_VALIDATION_OPTION, true)
-        ;
+        .sessionOption(ExecConstants.ENABLE_VECTOR_VALIDATION_OPTION, true);
     try (ClusterFixture cluster = builder.build();
          ClientFixture client = cluster.clientFixture()) {
 
@@ -93,13 +98,12 @@ public class TestValidationOptions extends DrillTest {
 
   @Test
   public void testConfig() throws Exception {
-    ClusterFixtureBuilder builder = ClusterFixture.builder()
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
         .maxParallelization(1)
         .configProperty(ExecConstants.ENABLE_ITERATOR_VALIDATION, true)
         .configProperty(ExecConstants.ENABLE_VECTOR_VALIDATION, true)
         .sessionOption(ExecConstants.ENABLE_ITERATOR_VALIDATION_OPTION, false)
-        .sessionOption(ExecConstants.ENABLE_VECTOR_VALIDATION_OPTION, false)
-        ;
+        .sessionOption(ExecConstants.ENABLE_VECTOR_VALIDATION_OPTION, false);
     try (ClusterFixture cluster = builder.build();
          ClientFixture client = cluster.clientFixture()) {
 
@@ -119,9 +123,8 @@ public class TestValidationOptions extends DrillTest {
 
   @Test
   public void testDefaults() throws Exception {
-    ClusterFixtureBuilder builder = ClusterFixture.builder()
-        .maxParallelization(1)
-        ;
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+        .maxParallelization(1);
     try (ClusterFixture cluster = builder.build();
          ClientFixture client = cluster.clientFixture()) {
 

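Tests built on ClusterFixture follow the same scheme: a BaseDirTestWatcher rule owns the temp directories, and passing it to ClusterFixture.builder() roots the cluster's workspaces there. A minimal sketch of the wiring shown above; the test body is intentionally left empty:

import org.apache.drill.test.BaseDirTestWatcher;
import org.apache.drill.test.ClientFixture;
import org.apache.drill.test.ClusterFixture;
import org.apache.drill.test.ClusterFixtureBuilder;
import org.apache.drill.test.DrillTest;
import org.junit.Rule;
import org.junit.Test;

public class ExampleClusterTest extends DrillTest {
  @Rule
  public final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();

  @Test
  public void smoke() throws Exception {
    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
        .maxParallelization(1);
    try (ClusterFixture cluster = builder.build();
         ClientFixture client = cluster.clientFixture()) {
      // run queries against the fixture-managed drillbit here
    }
  }
}
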
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/window/GenerateTestData.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/window/GenerateTestData.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/window/GenerateTestData.java
index c68cbe5..15b33c9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/window/GenerateTestData.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/window/GenerateTestData.java
@@ -17,11 +17,13 @@
  */
 package org.apache.drill.exec.physical.impl.window;
 
-import org.apache.drill.common.util.TestTools;
+import org.apache.drill.test.TestTools;
 
 import java.io.File;
 import java.io.FileNotFoundException;
 import java.io.PrintStream;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
@@ -268,7 +270,7 @@ public class GenerateTestData {
     }
   }
 
-  private static void writeData(final String path, final Partition[] partitions, final boolean addLineNo)
+  private static void writeData(final Path path, final Partition[] partitions, final boolean addLineNo)
       throws FileNotFoundException {
 
     // total number of rows
@@ -283,7 +285,7 @@ public class GenerateTestData {
 
     // data file(s)
     int fileId = 0;
-    PrintStream dataStream = new PrintStream(path + "/" + fileId + ".data.json");
+    PrintStream dataStream = new PrintStream(path.resolve(fileId + ".data.json").toFile());
 
     int emp_idx = 0;
     int lineNo = 0;
@@ -316,11 +318,11 @@ public class GenerateTestData {
     dataStream.close();
   }
 
-  private static void writeResults(final String path, final String prefix, final Partition[] partitions) throws FileNotFoundException {
+  private static void writeResults(final Path path, final String prefix, final Partition[] partitions) throws FileNotFoundException {
     // expected results for query without order by clause
-    final PrintStream resultStream = new PrintStream(path + prefix + ".tsv");
+    final PrintStream resultStream = new PrintStream(path.toString() + prefix + ".tsv");
     // expected results for query with order by clause
-    final PrintStream resultOrderStream = new PrintStream(path + prefix + ".oby.tsv");
+    final PrintStream resultOrderStream = new PrintStream(path.toString() + prefix + ".oby.tsv");
 
     int idx = 0;
     for (final Partition partition : partitions) {
@@ -354,11 +356,12 @@ public class GenerateTestData {
 
   private static void generateData(final String tableName, final Partition[] pby_data, final Partition[] nopby_data,
       final boolean addLineNo) throws FileNotFoundException {
-    final String WORKING_PATH = TestTools.getWorkingPath();
-    final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources";
-    final String path = TEST_RES_PATH+"/window/" + tableName;
+    final Path path = TestTools.WORKING_PATH
+      .resolve(TestTools.TEST_RESOURCES)
+      .resolve(Paths.get("window", tableName));
+
+    final File pathFolder = path.toFile();
 
-    final File pathFolder = new File(path);
     if (!pathFolder.exists()) {
       if (!pathFolder.mkdirs()) {
         System.err.printf("Couldn't create folder %s, exiting%n", path);
@@ -366,7 +369,6 @@ public class GenerateTestData {
     }
 
     writeData(path, pby_data, addLineNo);
-
     writeResults(path, "", nopby_data);
     writeResults(path, ".pby", pby_data);
   }

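The generator now composes java.nio.file.Path values from the TestTools constants instead of concatenating strings, which keeps separators correct and lets writeData()/writeResults() resolve file names off the same base. A sketch, assuming TestTools.WORKING_PATH and TestTools.TEST_RESOURCES as used in the hunk above (TEST_RESOURCES corresponds to the old "src/test/resources" suffix):

import java.nio.file.Path;
import java.nio.file.Paths;

import org.apache.drill.test.TestTools;

public class WindowDataPaths {
  // Resolves <working dir>/src/test/resources/window/<tableName>.
  static Path tableDir(String tableName) {
    return TestTools.WORKING_PATH
        .resolve(TestTools.TEST_RESOURCES)
        .resolve(Paths.get("window", tableName));
  }

  // Data files are then resolved off the directory, e.g. 0.data.json.
  static Path dataFile(Path tableDir, int fileId) {
    return tableDir.resolve(fileId + ".data.json");
  }
}
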
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/window/TestWindowFrame.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/window/TestWindowFrame.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/window/TestWindowFrame.java
index ab1ca31..90183d9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/window/TestWindowFrame.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/window/TestWindowFrame.java
@@ -17,14 +17,15 @@
  ******************************************************************************/
 package org.apache.drill.exec.physical.impl.window;
 
+import java.io.IOException;
+import java.nio.file.Paths;
 import java.util.Properties;
 
-import org.apache.drill.BaseTestQuery;
-import org.apache.drill.DrillTestWrapper;
+import org.apache.drill.test.BaseTestQuery;
+import org.apache.drill.test.DrillTestWrapper;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.UserRemoteException;
-import org.apache.drill.common.util.TestTools;
 import org.apache.drill.exec.ExecConstants;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
@@ -35,22 +36,20 @@ import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 public class TestWindowFrame extends BaseTestQuery {
-
-  private static final String TEST_RES_PATH = TestTools.getWorkingPath() + "/src/test/resources";
-
   @BeforeClass
-  public static void setupMSortBatchSize() {
+  public static void setupMSortBatchSize() throws IOException {
     // make sure memory sorter outputs 20 rows per batch
     final Properties props = cloneDefaultTestConfigProperties();
     props.put(ExecConstants.EXTERNAL_SORT_MSORT_MAX_BATCHSIZE, Integer.toString(20));
 
     updateTestCluster(1, DrillConfig.create(props));
+    dirTestWatcher.copyResourceToRoot(Paths.get("window"));
   }
 
   private DrillTestWrapper buildWindowQuery(final String tableName, final boolean withPartitionBy, final int numBatches)
       throws Exception {
     return testBuilder()
-      .sqlQuery(getFile("window/q1.sql"), TEST_RES_PATH, tableName, withPartitionBy ? "(partition by position_id)":"()")
+      .sqlQuery(getFile("window/q1.sql"), tableName, withPartitionBy ? "(partition by position_id)":"()")
       .ordered()
       .csvBaselineFile("window/" + tableName + (withPartitionBy ? ".pby" : "") + ".tsv")
       .baselineColumns("count", "sum")
@@ -61,7 +60,7 @@ public class TestWindowFrame extends BaseTestQuery {
   private DrillTestWrapper buildWindowWithOrderByQuery(final String tableName, final boolean withPartitionBy,
                                                        final int numBatches) throws Exception {
     return testBuilder()
-      .sqlQuery(getFile("window/q2.sql"), TEST_RES_PATH, tableName, withPartitionBy ? "(partition by position_id order by sub)" : "(order by sub)")
+      .sqlQuery(getFile("window/q2.sql"), tableName, withPartitionBy ? "(partition by position_id order by sub)" : "(order by sub)")
       .ordered()
       .csvBaselineFile("window/" + tableName + (withPartitionBy ? ".pby" : "") + ".oby.tsv")
       .baselineColumns("count", "sum", "row_number", "rank", "dense_rank", "cume_dist", "percent_rank")
@@ -104,23 +103,23 @@ public class TestWindowFrame extends BaseTestQuery {
   public void testMultipleFramers() throws Exception {
     final String window = " OVER(PARTITION BY position_id ORDER by sub)";
     test("SELECT COUNT(*)"+window+", SUM(salary)"+window+", ROW_NUMBER()"+window+", RANK()"+window+" " +
-      "FROM dfs_test.`"+TEST_RES_PATH+"/window/b1.p1`"
+      "FROM dfs.`window/b1.p1`"
     );
   }
 
   @Test
   public void testUnboundedFollowing() throws Exception {
     testBuilder()
-      .sqlQuery(getFile("window/q3.sql"), TEST_RES_PATH)
+      .sqlQuery(getFile("window/q3.sql"))
       .ordered()
-      .sqlBaselineQuery(getFile("window/q4.sql"), TEST_RES_PATH)
+      .sqlBaselineQuery(getFile("window/q4.sql"))
       .build()
       .run();
   }
 
   @Test
   public void testAggregateRowsUnboundedAndCurrentRow() throws Exception {
-    final String table = "dfs_test.`"+TEST_RES_PATH+"/window/b4.p4`";
+    final String table = "dfs.`window/b4.p4`";
     testBuilder()
       .sqlQuery(getFile("window/aggregate_rows_unbounded_current.sql"), table)
       .ordered()
@@ -131,7 +130,7 @@ public class TestWindowFrame extends BaseTestQuery {
 
   @Test
   public void testLastValueRowsUnboundedAndCurrentRow() throws Exception {
-    final String table = "dfs_test.`"+TEST_RES_PATH+"/window/b4.p4`";
+    final String table = "dfs.`window/b4.p4`";
     testBuilder()
       .sqlQuery(getFile("window/last_value_rows_unbounded_current.sql"), table)
       .unOrdered()
@@ -142,7 +141,7 @@ public class TestWindowFrame extends BaseTestQuery {
 
   @Test
   public void testAggregateRangeCurrentAndCurrent() throws Exception {
-    final String table = "dfs_test.`"+TEST_RES_PATH+"/window/b4.p4`";
+    final String table = "dfs.`window/b4.p4`";
     testBuilder()
       .sqlQuery(getFile("window/aggregate_range_current_current.sql"), table)
       .unOrdered()
@@ -153,7 +152,7 @@ public class TestWindowFrame extends BaseTestQuery {
 
   @Test
   public void testFirstValueRangeCurrentAndCurrent() throws Exception {
-    final String table = "dfs_test.`"+TEST_RES_PATH+"/window/b4.p4`";
+    final String table = "dfs.`window/b4.p4`";
     testBuilder()
       .sqlQuery(getFile("window/first_value_range_current_current.sql"), table)
       .unOrdered()
@@ -210,27 +209,27 @@ public class TestWindowFrame extends BaseTestQuery {
   @Test // DRILL-3218
   @Category(UnlikelyTest.class)
   public void testMaxVarChar() throws Exception {
-    test(getFile("window/q3218.sql"), TEST_RES_PATH);
+    test(getFile("window/q3218.sql"));
   }
 
   @Test // DRILL-3220
   @Category(UnlikelyTest.class)
   public void testCountConst() throws Exception {
-    test(getFile("window/q3220.sql"), TEST_RES_PATH);
+    test(getFile("window/q3220.sql"));
   }
 
   @Test // DRILL-3604
   @Category(UnlikelyTest.class)
   public void testFix3604() throws Exception {
     // make sure the query doesn't fail
-    test(getFile("window/3604.sql"), TEST_RES_PATH);
+    test(getFile("window/3604.sql"));
   }
 
   @Test // DRILL-3605
   @Category(UnlikelyTest.class)
   public void testFix3605() throws Exception {
     testBuilder()
-      .sqlQuery(getFile("window/3605.sql"), TEST_RES_PATH)
+      .sqlQuery(getFile("window/3605.sql"))
       .ordered()
       .csvBaselineFile("window/3605.tsv")
       .baselineColumns("col2", "lead_col2")
@@ -242,7 +241,7 @@ public class TestWindowFrame extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   public void testFix3606() throws Exception {
     testBuilder()
-      .sqlQuery(getFile("window/3606.sql"), TEST_RES_PATH)
+      .sqlQuery(getFile("window/3606.sql"))
       .ordered()
       .csvBaselineFile("window/3606.tsv")
       .baselineColumns("col2", "lead_col2")
@@ -253,7 +252,7 @@ public class TestWindowFrame extends BaseTestQuery {
   @Test
   public void testLead() throws Exception {
     testBuilder()
-      .sqlQuery(getFile("window/lead.oby.sql"), TEST_RES_PATH)
+      .sqlQuery(getFile("window/lead.oby.sql"))
       .ordered()
       .csvBaselineFile("window/b4.p4.lead.oby.tsv")
       .baselineColumns("lead")
@@ -264,7 +263,7 @@ public class TestWindowFrame extends BaseTestQuery {
   @Test
   public void testLagWithPby() throws Exception {
     testBuilder()
-      .sqlQuery(getFile("window/lag.pby.oby.sql"), TEST_RES_PATH)
+      .sqlQuery(getFile("window/lag.pby.oby.sql"))
       .ordered()
       .csvBaselineFile("window/b4.p4.lag.pby.oby.tsv")
       .baselineColumns("lag")
@@ -276,7 +275,7 @@ public class TestWindowFrame extends BaseTestQuery {
   @Test
   public void testLag() throws Exception {
     testBuilder()
-      .sqlQuery(getFile("window/lag.oby.sql"), TEST_RES_PATH)
+      .sqlQuery(getFile("window/lag.oby.sql"))
       .ordered()
       .csvBaselineFile("window/b4.p4.lag.oby.tsv")
       .baselineColumns("lag")
@@ -287,7 +286,7 @@ public class TestWindowFrame extends BaseTestQuery {
   @Test
   public void testLeadWithPby() throws Exception {
     testBuilder()
-      .sqlQuery(getFile("window/lead.pby.oby.sql"), TEST_RES_PATH)
+      .sqlQuery(getFile("window/lead.pby.oby.sql"))
       .ordered()
       .csvBaselineFile("window/b4.p4.lead.pby.oby.tsv")
       .baselineColumns("lead")
@@ -298,7 +297,7 @@ public class TestWindowFrame extends BaseTestQuery {
   @Test
   public void testFirstValue() throws Exception {
     testBuilder()
-      .sqlQuery(getFile("window/fval.pby.sql"), TEST_RES_PATH)
+      .sqlQuery(getFile("window/fval.pby.sql"))
       .ordered()
       .csvBaselineFile("window/b4.p4.fval.pby.tsv")
       .baselineColumns("first_value")
@@ -309,7 +308,7 @@ public class TestWindowFrame extends BaseTestQuery {
   @Test
   public void testLastValue() throws Exception {
     testBuilder()
-      .sqlQuery(getFile("window/lval.pby.oby.sql"), TEST_RES_PATH)
+      .sqlQuery(getFile("window/lval.pby.oby.sql"))
       .ordered()
       .csvBaselineFile("window/b4.p4.lval.pby.oby.tsv")
       .baselineColumns("last_value")
@@ -320,19 +319,19 @@ public class TestWindowFrame extends BaseTestQuery {
   @Test
   public void testFirstValueAllTypes() throws Exception {
     // make sure all types are handled properly
-    test(getFile("window/fval.alltypes.sql"), TEST_RES_PATH);
+    test(getFile("window/fval.alltypes.sql"));
   }
 
   @Test
   public void testLastValueAllTypes() throws Exception {
     // make sure all types are handled properly
-    test(getFile("window/fval.alltypes.sql"), TEST_RES_PATH);
+    test(getFile("window/fval.alltypes.sql"));
   }
 
   @Test
   public void testNtile() throws Exception {
     testBuilder()
-      .sqlQuery(getFile("window/ntile.sql"), TEST_RES_PATH)
+      .sqlQuery(getFile("window/ntile.sql"))
       .ordered()
       .csvBaselineFile("window/b2.p4.ntile.tsv")
       .baselineColumns("ntile")
@@ -343,7 +342,7 @@ public class TestWindowFrame extends BaseTestQuery {
   @Test
   public void test3648Fix() throws Exception {
     testBuilder()
-      .sqlQuery(getFile("window/3648.sql"), TEST_RES_PATH)
+      .sqlQuery(getFile("window/3648.sql"))
       .ordered()
       .csvBaselineFile("window/3648.tsv")
       .baselineColumns("ntile")
@@ -353,13 +352,13 @@ public class TestWindowFrame extends BaseTestQuery {
 
   @Test
   public void test3654Fix() throws Exception {
-    test("SELECT FIRST_VALUE(col8) OVER(PARTITION BY col7 ORDER BY col8) FROM dfs_test.`%s/window/3648.parquet`", TEST_RES_PATH);
+    test("SELECT FIRST_VALUE(col8) OVER(PARTITION BY col7 ORDER BY col8) FROM dfs.`window/3648.parquet`");
   }
 
   @Test
   public void test3643Fix() throws Exception {
     try {
-      test("SELECT NTILE(0) OVER(PARTITION BY col7 ORDER BY col8) FROM dfs_test.`%s/window/3648.parquet`", TEST_RES_PATH);
+      test("SELECT NTILE(0) OVER(PARTITION BY col7 ORDER BY col8) FROM dfs.`window/3648.parquet`");
       fail("Query should have failed");
     } catch (UserRemoteException e) {
       assertEquals(ErrorType.FUNCTION, e.getErrorType());
@@ -369,7 +368,7 @@ public class TestWindowFrame extends BaseTestQuery {
   @Test
   public void test3668Fix() throws Exception {
     testBuilder()
-      .sqlQuery(getFile("window/3668.sql"), TEST_RES_PATH)
+      .sqlQuery(getFile("window/3668.sql"))
       .ordered()
       .baselineColumns("cnt").baselineValues(2L)
       .build()
@@ -379,20 +378,20 @@ public class TestWindowFrame extends BaseTestQuery {
   @Test
   public void testLeadParams() throws Exception {
     // make sure only the default offset (1) is supported for the LEAD/LAG functions
-    final String query = "SELECT %s OVER(PARTITION BY col7 ORDER BY col8) FROM dfs_test.`%s/window/3648.parquet`";
+    final String query = "SELECT %s OVER(PARTITION BY col7 ORDER BY col8) FROM dfs.`window/3648.parquet`";
 
-    test(query, "LEAD(col8, 1)", TEST_RES_PATH);
-    test(query, "LAG(col8, 1)", TEST_RES_PATH);
+    test(query, "LEAD(col8, 1)");
+    test(query, "LAG(col8, 1)");
 
     try {
-      test(query, "LEAD(col8, 2)", TEST_RES_PATH);
+      test(query, "LEAD(col8, 2)");
       fail("query should fail");
     } catch (UserRemoteException e) {
       assertEquals(ErrorType.UNSUPPORTED_OPERATION, e.getErrorType());
     }
 
     try {
-      test(query, "LAG(col8, 2)", TEST_RES_PATH);
+      test(query, "LAG(col8, 2)");
       fail("query should fail");
     } catch (UserRemoteException e) {
       assertEquals(ErrorType.UNSUPPORTED_OPERATION, e.getErrorType());
@@ -435,12 +434,12 @@ public class TestWindowFrame extends BaseTestQuery {
 
   @Test
   public void test4457() throws Exception {
-    runSQL(String.format("CREATE TABLE dfs_test.tmp.`4457` AS " +
+    runSQL("CREATE TABLE dfs.tmp.`4457` AS " +
       "SELECT columns[0] AS c0, NULLIF(columns[1], 'null') AS c1 " +
-      "FROM dfs_test.`%s/window/4457.csv`", TEST_RES_PATH));
+      "FROM dfs.`window/4457.csv`");
 
     testBuilder()
-      .sqlQuery("SELECT COALESCE(FIRST_VALUE(c1) OVER(ORDER BY c0 RANGE BETWEEN CURRENT ROW AND CURRENT ROW), 'EMPTY') AS fv FROM dfs_test.tmp.`4457`")
+      .sqlQuery("SELECT COALESCE(FIRST_VALUE(c1) OVER(ORDER BY c0 RANGE BETWEEN CURRENT ROW AND CURRENT ROW), 'EMPTY') AS fv FROM dfs.tmp.`4457`")
       .ordered()
       .baselineColumns("fv")
       .baselineValues("a")
@@ -458,7 +457,7 @@ public class TestWindowFrame extends BaseTestQuery {
   @Category(UnlikelyTest.class)
   public void test4657() throws Exception {
     testBuilder()
-      .sqlQuery("select row_number() over(order by position_id) rn, rank() over(order by position_id) rnk from dfs_test.`%s/window/b3.p2`", TEST_RES_PATH)
+      .sqlQuery("select row_number() over(order by position_id) rn, rank() over(order by position_id) rnk from dfs.`window/b3.p2`")
       .ordered()
       .csvBaselineFile("window/4657.tsv")
       .baselineColumns("rn", "rnk")

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestCorruptParquetDateCorrection.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestCorruptParquetDateCorrection.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestCorruptParquetDateCorrection.java
index 4f139fc..1013511 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestCorruptParquetDateCorrection.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestCorruptParquetDateCorrection.java
@@ -21,16 +21,19 @@ import static java.lang.String.format;
 
 import org.apache.drill.categories.ParquetTest;
 import org.apache.drill.PlanTestBase;
-import org.apache.drill.TestBuilder;
+import org.apache.drill.test.TestBuilder;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.store.parquet.Metadata;
-import org.apache.hadoop.fs.Path;
 import org.joda.time.DateTime;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.io.File;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
 /**
  * Tests for compatibility when reading old parquet files after the date corruption
  * issue was fixed in DRILL-4203.
@@ -64,7 +67,7 @@ import org.junit.experimental.categories.Category;
  */
 @Category({ParquetTest.class, UnlikelyTest.class})
 public class TestCorruptParquetDateCorrection extends PlanTestBase {
-
+  private static final Path PARQUET_4203 = Paths.get("parquet", "4203_corrupt_dates");
   // 4 files are in the directory:
   //    - one created with the parquet-writer version number of "2"
   //        - files have extra meta field: parquet-writer.version = 2
@@ -75,44 +78,39 @@ public class TestCorruptParquetDateCorrection extends PlanTestBase {
   //    - one from the 0.6 version of Drill, before files had min/max statistics
   //        - detecting corrupt values must be deferred to actual data page reading
   //    - one from 1.4, where there is a proper created-by, but the corruption is present
-  private static final String MIXED_CORRUPTED_AND_CORRECT_DATES_PATH =
-      "[WORKING_PATH]/src/test/resources/parquet/4203_corrupt_dates/mixed_drill_versions";
+  private static final Path MIXED_CORRUPTED_AND_CORRECT_DATES_PATH = PARQUET_4203.resolve("mixed_drill_versions");
   // partitioned with 1.2.0; no definitive metadata that these files were written with Drill.
   // The values will be checked to see whether they look corrupt, and they will be corrected
   // by default. Users can set the format plugin option autoCorrectCorruptDates to disable
   // this behavior if they have foreign parquet files with valid but rare date values that fall
   // in a similar range to Drill's corrupt values.
-  private static final String CORRUPTED_PARTITIONED_DATES_1_2_PATH =
-      "[WORKING_PATH]/src/test/resources/parquet/4203_corrupt_dates/partitioned_with_corruption_4203_1_2";
+  private static final Path PARTITIONED_1_2_FOLDER = Paths.get("partitioned_with_corruption_4203_1_2");
+  private static final Path CORRUPTED_PARTITIONED_DATES_1_2_PATH = PARQUET_4203.resolve(PARTITIONED_1_2_FOLDER);
   // partitioned with 1.4.0; no definitive metadata regarding the date corruption status.
   // The same corrupt-date detection approach is used as for the files partitioned with 1.2.0.
-  private static final String CORRUPTED_PARTITIONED_DATES_1_4_0_PATH =
-      "[WORKING_PATH]/src/test/resources/parquet/4203_corrupt_dates/partitioned_with_corruption_4203";
-  private static final String PARQUET_DATE_FILE_WITH_NULL_FILLED_COLS =
-      "[WORKING_PATH]/src/test/resources/parquet/4203_corrupt_dates/null_date_cols_with_corruption_4203.parquet";
-  private static final String CORRECT_PARTITIONED_DATES_1_9_PATH =
-      "[WORKING_PATH]/src/test/resources/parquet/4203_corrupt_dates/1_9_0_partitioned_no_corruption";
-  private static final String VARCHAR_PARTITIONED =
-      "[WORKING_PATH]/src/test/resources/parquet/4203_corrupt_dates/fewtypes_varcharpartition";
-  private static final String DATE_PARTITIONED =
-      "[WORKING_PATH]/src/test/resources/parquet/4203_corrupt_dates/fewtypes_datepartition";
-  private static final String EXCEPTION_WHILE_PARSING_CREATED_BY_META =
-      "[WORKING_PATH]/src/test/resources/parquet/4203_corrupt_dates/hive1dot2_fewtypes_null";
-  private static final String CORRECT_DATES_1_6_0_PATH =
-      "[WORKING_PATH]/src/test/resources/parquet/4203_corrupt_dates/correct_dates_and_old_drill_parquet_writer.parquet";
-  private static final String PARTITIONED_1_2_FOLDER = "partitioned_with_corruption_4203_1_2";
-  private static final String MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER = "mixed_partitioned";
-
+  private static final Path PARTITIONED_1_4_FOLDER = Paths.get("partitioned_with_corruption_4203");
+  private static final Path CORRUPTED_PARTITIONED_DATES_1_4_0_PATH = PARQUET_4203.resolve(PARTITIONED_1_4_FOLDER);
+  private static final Path PARQUET_DATE_FILE_WITH_NULL_FILLED_COLS = PARQUET_4203.resolve("null_date_cols_with_corruption_4203.parquet");
+  private static final Path PARTITIONED_1_9_FOLDER = Paths.get("1_9_0_partitioned_no_corruption");
+  private static final Path CORRECT_PARTITIONED_DATES_1_9_PATH = PARQUET_4203.resolve(PARTITIONED_1_9_FOLDER);
+  private static final Path VARCHAR_PARTITIONED = PARQUET_4203.resolve("fewtypes_varcharpartition");
+  private static final Path DATE_PARTITIONED = PARQUET_4203.resolve("fewtypes_datepartition");
+  private static final Path EXCEPTION_WHILE_PARSING_CREATED_BY_META = PARQUET_4203.resolve("hive1dot2_fewtypes_null");
+  private static final Path CORRECT_DATES_1_6_0_PATH = PARQUET_4203.resolve("correct_dates_and_old_drill_parquet_writer.parquet");
+  private static final Path MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER = Paths.get("mixed_partitioned");
 
   @BeforeClass
   public static void initFs() throws Exception {
     // Move files into the temp directory and rewrite the metadata cache file to contain the appropriate absolute paths
-    copyDirectoryIntoTempSpace(CORRUPTED_PARTITIONED_DATES_1_2_PATH);
-    copyMetaDataCacheToTempReplacingInternalPaths("parquet/4203_corrupt_dates/drill.parquet.metadata_1_2.requires_replace.txt",
-        PARTITIONED_1_2_FOLDER, Metadata.METADATA_FILENAME);
-    copyDirectoryIntoTempSpace(CORRUPTED_PARTITIONED_DATES_1_2_PATH, MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER);
-    copyDirectoryIntoTempSpace(CORRECT_PARTITIONED_DATES_1_9_PATH, MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER);
-    copyDirectoryIntoTempSpace(CORRUPTED_PARTITIONED_DATES_1_4_0_PATH, MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER);
+    dirTestWatcher.copyResourceToRoot(PARQUET_4203);
+    dirTestWatcher.copyResourceToRoot(CORRUPTED_PARTITIONED_DATES_1_2_PATH, PARTITIONED_1_2_FOLDER);
+    dirTestWatcher.copyResourceToRoot(CORRUPTED_PARTITIONED_DATES_1_4_0_PATH, MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER);
+    dirTestWatcher.copyResourceToRoot(CORRUPTED_PARTITIONED_DATES_1_2_PATH, MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER.resolve(PARTITIONED_1_2_FOLDER));
+    dirTestWatcher.copyResourceToRoot(CORRECT_PARTITIONED_DATES_1_9_PATH, MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER.resolve(PARTITIONED_1_9_FOLDER));
+    dirTestWatcher.copyResourceToRoot(CORRUPTED_PARTITIONED_DATES_1_4_0_PATH, MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER.resolve(PARTITIONED_1_4_FOLDER));
+    File metaData = dirTestWatcher.copyResourceToRoot(PARQUET_4203.resolve("drill.parquet.metadata_1_2.requires_replace.txt"),
+      PARTITIONED_1_2_FOLDER.resolve(Metadata.METADATA_FILENAME));
+    dirTestWatcher.replaceMetaDataContents(metaData, dirTestWatcher.getRootDir(), null);
   }
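
The staging idiom in the hunk above is the core of the migration: the [WORKING_PATH]-based absolute strings and the copyDirectoryIntoTempSpace()/copyMetaDataCacheToTempReplacingInternalPaths() helpers give way to relative java.nio.file.Path constants that are copied into the watcher-managed workspace root, with metadata cache templates rewritten in place. A condensed sketch, assuming the dirTestWatcher methods exactly as they appear in this diff (copyResourceToRoot(), replaceMetaDataContents(), getRootDir()):

  import java.io.File;
  import java.nio.file.Path;
  import java.nio.file.Paths;

  import org.apache.drill.exec.store.parquet.Metadata;
  import org.junit.BeforeClass;

  // Relative paths compose with resolve(); nothing becomes absolute until staging time.
  private static final Path PARQUET_4203 = Paths.get("parquet", "4203_corrupt_dates");
  private static final Path PARTITIONED_1_2 = Paths.get("partitioned_with_corruption_4203_1_2");

  @BeforeClass
  public static void stageResources() throws Exception {
    // Copy the whole resource tree under the dfs workspace root.
    dirTestWatcher.copyResourceToRoot(PARQUET_4203);
    // Copy the metadata cache template to its final name, then substitute its
    // placeholder paths with the actual workspace root.
    File meta = dirTestWatcher.copyResourceToRoot(
        PARQUET_4203.resolve("drill.parquet.metadata_1_2.requires_replace.txt"),
        PARTITIONED_1_2.resolve(Metadata.METADATA_FILENAME));
    dirTestWatcher.replaceMetaDataContents(meta, dirTestWatcher.getRootDir(), null);
  }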
 
   /**
@@ -190,12 +188,11 @@ public class TestCorruptParquetDateCorrection extends PlanTestBase {
         .go();
   }
 
-
   @Test
   public void testReadPartitionedOnCorruptedDates_UserDisabledCorrection() throws Exception {
     try {
       for (String selection : new String[]{"*", "date_col"}) {
-        for (String table : new String[]{CORRUPTED_PARTITIONED_DATES_1_2_PATH, CORRUPTED_PARTITIONED_DATES_1_4_0_PATH}) {
+        for (Path table : new Path[]{CORRUPTED_PARTITIONED_DATES_1_2_PATH, CORRUPTED_PARTITIONED_DATES_1_4_0_PATH}) {
           // for sanity, try reading all partitions without a filter
           TestBuilder builder = testBuilder()
               .sqlQuery("select %s from table(dfs.`%s` (type => 'parquet', autoCorrectCorruptDates => false))",
@@ -228,7 +225,7 @@ public class TestCorruptParquetDateCorrection extends PlanTestBase {
   public void testCorruptValueDetectionDuringPruning() throws Exception {
     try {
       for (String selection : new String[]{"*", "date_col"}) {
-        for (String table : new String[]{CORRUPTED_PARTITIONED_DATES_1_2_PATH, CORRUPTED_PARTITIONED_DATES_1_4_0_PATH}) {
+        for (Path table : new Path[]{CORRUPTED_PARTITIONED_DATES_1_2_PATH, CORRUPTED_PARTITIONED_DATES_1_4_0_PATH}) {
           // for sanity, try reading all partitions without a filter
           TestBuilder builder = testBuilder()
               .sqlQuery("select %s from dfs.`%s`", selection, table)
@@ -323,7 +320,7 @@ public class TestCorruptParquetDateCorrection extends PlanTestBase {
   @Test
   public void testReadOldMetadataCacheFile() throws Exception {
     // for sanity, try reading all partitions without a filter
-    String query = format("select date_col from dfs.`%s`", new Path(getDfsTestTmpSchemaLocation(), PARTITIONED_1_2_FOLDER));
+    String query = format("select date_col from dfs.`%s`", PARTITIONED_1_2_FOLDER);
     TestBuilder builder = testBuilder()
         .sqlQuery(query)
         .unOrdered()
@@ -336,7 +333,7 @@ public class TestCorruptParquetDateCorrection extends PlanTestBase {
   @Test
   public void testReadOldMetadataCacheFileWithPruning() throws Exception {
     String query = format("select date_col from dfs.`%s` where date_col = date '1970-01-01'",
-        new Path(getDfsTestTmpSchemaLocation(), PARTITIONED_1_2_FOLDER));
+      PARTITIONED_1_2_FOLDER);
     // verify that pruning is actually taking place
     testPlanMatchingPatterns(query, new String[]{"numFiles=1", "usedMetadataFile=true"}, null);
 
@@ -354,7 +351,7 @@ public class TestCorruptParquetDateCorrection extends PlanTestBase {
     // for sanity, try reading all partitions without a filter
     TestBuilder builder = testBuilder()
         .sqlQuery("select date_col from table(dfs.`%s` (type => 'parquet', autoCorrectCorruptDates => false))",
-            new Path(getDfsTestTmpSchemaLocation(), PARTITIONED_1_2_FOLDER))
+          PARTITIONED_1_2_FOLDER)
         .unOrdered()
         .baselineColumns("date_col");
     addCorruptedDateBaselineValues(builder);
@@ -362,7 +359,7 @@ public class TestCorruptParquetDateCorrection extends PlanTestBase {
 
     String query = format("select date_col from table(dfs.`%s` (type => 'parquet', " +
         "autoCorrectCorruptDates => false)) where date_col = cast('15334-03-17' as date)",
-        new Path(getDfsTestTmpSchemaLocation(), PARTITIONED_1_2_FOLDER));
+      PARTITIONED_1_2_FOLDER);
     // verify that pruning is actually taking place
     testPlanMatchingPatterns(query, new String[]{"numFiles=1", "usedMetadataFile=true"}, null);
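
The pruning assertions rely on PlanTestBase.testPlanMatchingPatterns(query, expectedPatterns, excludedPatterns): the generated plan text must contain every expected pattern and none of the excluded ones (null means nothing is excluded). A small sketch against the relative-path layout this commit introduces:

  // Prune to a single file via the partition column, and confirm both that
  // pruning happened (numFiles=1) and that the metadata cache was consulted.
  String q = format("select date_col from dfs.`%s` where date_col = date '1970-01-01'",
      PARTITIONED_1_2_FOLDER);
  testPlanMatchingPatterns(q, new String[]{"numFiles=1", "usedMetadataFile=true"}, null);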
 
@@ -377,13 +374,13 @@ public class TestCorruptParquetDateCorrection extends PlanTestBase {
 
   @Test
   public void testReadNewMetadataCacheFileOverOldAndNewFiles() throws Exception {
-    String table = format("dfs.`%s`", new Path(getDfsTestTmpSchemaLocation(), MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER));
-    copyMetaDataCacheToTempReplacingInternalPaths(
-        "parquet/4203_corrupt_dates/mixed_version_partitioned_metadata.requires_replace.txt",
-        MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER, Metadata.METADATA_FILENAME);
+    File meta = dirTestWatcher.copyResourceToRoot(
+       PARQUET_4203.resolve("mixed_version_partitioned_metadata.requires_replace.txt"),
+       MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER.resolve(Metadata.METADATA_FILENAME));
+    dirTestWatcher.replaceMetaDataContents(meta, dirTestWatcher.getRootDir(), null);
     // for sanity, try reading all partitions without a filter
     TestBuilder builder = testBuilder()
-        .sqlQuery("select date_col from " + table)
+        .sqlQuery("select date_col from dfs.`%s`", MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER)
         .unOrdered()
         .baselineColumns("date_col");
     addDateBaselineValues(builder);
@@ -391,7 +388,7 @@ public class TestCorruptParquetDateCorrection extends PlanTestBase {
     addDateBaselineValues(builder);
     builder.go();
 
-    String query = format("select date_col from %s where date_col = date '1970-01-01'", table);
+    String query = format("select date_col from dfs.`%s` where date_col = date '1970-01-01'", MIXED_CORRUPTED_AND_CORRECT_PARTITIONED_FOLDER);
     // verify that pruning is actually taking place
     testPlanMatchingPatterns(query, new String[]{"numFiles=3", "usedMetadataFile=true"}, null);
 
@@ -436,7 +433,6 @@ public class TestCorruptParquetDateCorrection extends PlanTestBase {
     }
   }
 
-
   private void addDateBaselineValues(TestBuilder builder) {
     builder
         .baselineValues(new DateTime(1970, 1, 1, 0, 0))
@@ -477,5 +473,4 @@ public class TestCorruptParquetDateCorrection extends PlanTestBase {
       builder.go();
     }
   }
-
 }

