drill-commits mailing list archives

From prog...@apache.org
Subject [11/22] drill git commit: DRILL-5783, DRILL-5841, DRILL-5894: Rationalize test temp directories
Date Wed, 15 Nov 2017 01:46:57 GMT
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
index 9715d14..e3fc833 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriter.java
@@ -17,14 +17,15 @@
  */
 package org.apache.drill.exec.physical.impl.writer;
 
-import static org.apache.drill.TestBuilder.convertToLocalTimestamp;
 import static org.apache.drill.exec.store.parquet.ParquetRecordWriter.DRILL_VERSION_PROPERTY;
+import static org.apache.drill.test.TestBuilder.convertToLocalTimestamp;
 import static org.apache.parquet.format.converter.ParquetMetadataConverter.SKIP_ROW_GROUPS;
 import static org.junit.Assert.assertEquals;
 
 import java.io.File;
 import java.io.FileWriter;
 import java.math.BigDecimal;
+import java.nio.file.Paths;
 import java.sql.Date;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -33,13 +34,13 @@ import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.drill.BaseTestQuery;
+import com.google.common.base.Joiner;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.ParquetTest;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.types.TypeProtos;
 import org.apache.drill.common.util.DrillVersionInfo;
-import org.apache.drill.common.util.TestTools;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.fn.interp.TestConstantFolding;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
@@ -54,26 +55,24 @@ import org.joda.time.Period;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
-import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TemporaryFolder;
 import org.junit.runner.RunWith;
 import org.junit.runners.Parameterized;
 
-import com.google.common.base.Joiner;
-
 @RunWith(Parameterized.class)
 @Category({SlowTest.class, ParquetTest.class})
 public class TestParquetWriter extends BaseTestQuery {
-
   @Parameterized.Parameters
   public static Collection<Object[]> data() {
     return Arrays.asList(new Object[][] { {100} });
   }
 
-  @Rule
-  public TemporaryFolder folder = new TemporaryFolder();
+  @BeforeClass
+  public static void setupTestFiles() {
+    dirTestWatcher.copyResourceToRoot(Paths.get("parquet", "int96_dict_change"));
+  }
+
   static FileSystem fs;
 
   // Map storing a convenient name as well as the cast type necessary
@@ -116,8 +115,7 @@ public class TestParquetWriter extends BaseTestQuery {
     allTypesSelection = Joiner.on(",").join(allTypeSelectsAndCasts);
   }
 
-
-  private String allTypesTable = "cp.`/parquet/alltypes.json`";
+  private String allTypesTable = "cp.`parquet/alltypes.json`";
 
   @Parameterized.Parameter
   public int repeat = 1;
@@ -136,7 +134,7 @@ public class TestParquetWriter extends BaseTestQuery {
   @Test
   public void testSmallFileValueReadWrite() throws Exception {
     String selection = "key";
-    String inputTable = "cp.`/store/json/intData.json`";
+    String inputTable = "cp.`store/json/intData.json`";
     runTestAndValidate(selection, selection, inputTable, "smallFileTest");
   }
 
@@ -166,14 +164,17 @@ public class TestParquetWriter extends BaseTestQuery {
     colNames[numCols - 1] = "col_" + (numCols - 1);
     values[numCols - 1] = 100l;
 
+    String path = "test";
+    File pathDir = dirTestWatcher.makeRootSubDir(Paths.get(path));
+
     // write it to a file in the temp directory for the test
-    new TestConstantFolding.SmallFileCreator(folder).setRecord(sb.toString()).createFiles(1, 1, "json");
+    new TestConstantFolding.SmallFileCreator(pathDir)
+      .setRecord(sb.toString()).createFiles(1, 1, "json");
 
-    String path = folder.getRoot().toPath().toString();
-    test("use dfs_test.tmp");
-    test("create table WIDE_PARQUET_TABLE_TestParquetWriter_testLargeFooter as select * from dfs.`" + path + "/smallfile/smallfile.json`");
+    test("use dfs.tmp");
+    test("create table WIDE_PARQUET_TABLE_TestParquetWriter_testLargeFooter as select * from dfs.`%s/smallfile/smallfile.json`", path);
     testBuilder()
-        .sqlQuery("select * from dfs_test.tmp.WIDE_PARQUET_TABLE_TestParquetWriter_testLargeFooter")
+        .sqlQuery("select * from dfs.tmp.WIDE_PARQUET_TABLE_TestParquetWriter_testLargeFooter")
         .unOrdered()
         .baselineColumns(colNames)
         .baselineValues(values)
@@ -377,7 +378,7 @@ public class TestParquetWriter extends BaseTestQuery {
   @Ignore("Test file not available")
   @Test
   public void testBitError_Drill_2031() throws Exception {
-    compareParquetReadersHyperVector("*", "dfs.`/tmp/wide2/0_0_3.parquet`");
+    compareParquetReadersHyperVector("*", "dfs.`tmp/wide2/0_0_3.parquet`");
   }
 
   @Test
@@ -445,7 +446,7 @@ public class TestParquetWriter extends BaseTestQuery {
     String queryFromWriteOut = "select * from " + outputFile;
 
     try {
-      test("use dfs_test.tmp");
+      test("use dfs.tmp");
       test(ctasStmt);
       testBuilder()
           .ordered()
@@ -495,82 +496,82 @@ public class TestParquetWriter extends BaseTestQuery {
   @Ignore("Binary file too large for version control")
   @Test
   public void testReadVoter() throws Exception {
-    compareParquetReadersHyperVector("*", "dfs.`/tmp/voter.parquet`");
+    compareParquetReadersHyperVector("*", "dfs.`tmp/voter.parquet`");
   }
 
   @Ignore("Test file not available")
   @Test
   public void testReadSf_100_supplier() throws Exception {
-    compareParquetReadersHyperVector("*", "dfs.`/tmp/sf100_supplier.parquet`");
+    compareParquetReadersHyperVector("*", "dfs.`tmp/sf100_supplier.parquet`");
   }
 
   @Ignore("Binary file too large for version control")
   @Test
   public void testParquetRead_checkNulls_NullsFirst() throws Exception {
     compareParquetReadersColumnar("*",
-        "dfs.`/tmp/parquet_with_nulls_should_sum_100000_nulls_first.parquet`");
+        "dfs.`tmp/parquet_with_nulls_should_sum_100000_nulls_first.parquet`");
   }
 
   @Ignore("Test file not available")
   @Test
   public void testParquetRead_checkNulls() throws Exception {
-    compareParquetReadersColumnar("*", "dfs.`/tmp/parquet_with_nulls_should_sum_100000.parquet`");
+    compareParquetReadersColumnar("*", "dfs.`tmp/parquet_with_nulls_should_sum_100000.parquet`");
   }
 
   @Ignore("Binary file too large for version control")
   @Test
   public void test958_sql() throws Exception {
-    compareParquetReadersHyperVector("ss_ext_sales_price", "dfs.`/tmp/store_sales`");
+    compareParquetReadersHyperVector("ss_ext_sales_price", "dfs.`tmp/store_sales`");
   }
 
   @Ignore("Binary file too large for version control")
   @Test
   public void testReadSf_1_supplier() throws Exception {
-    compareParquetReadersHyperVector("*", "dfs.`/tmp/orders_part-m-00001.parquet`");
+    compareParquetReadersHyperVector("*", "dfs.`tmp/orders_part-m-00001.parquet`");
   }
 
   @Ignore("Binary file too large for version control")
   @Test
   public void test958_sql_all_columns() throws Exception {
-    compareParquetReadersHyperVector("*", "dfs.`/tmp/store_sales`");
-    compareParquetReadersHyperVector("ss_addr_sk, ss_hdemo_sk", "dfs.`/tmp/store_sales`");
+    compareParquetReadersHyperVector("*", "dfs.`tmp/store_sales`");
+    compareParquetReadersHyperVector("ss_addr_sk, ss_hdemo_sk", "dfs.`tmp/store_sales`");
     // TODO - Drill 1388 - this currently fails, but it is an issue with project, not the reader, pulled out the physical plan
     // removed the unneeded project in the plan and ran it against both readers; their outputs matched
 //    compareParquetReadersHyperVector("pig_schema,ss_sold_date_sk,ss_item_sk,ss_cdemo_sk,ss_addr_sk, ss_hdemo_sk",
-//        "dfs.`/tmp/store_sales`");
+//        "dfs.`tmp/store_sales`");
   }
 
   @Ignore("Binary file too large for version control")
   @Test
   public void testDrill_1314() throws Exception {
-    compareParquetReadersColumnar("l_partkey ", "dfs.`/tmp/drill_1314.parquet`");
+    compareParquetReadersColumnar("l_partkey ", "dfs.`tmp/drill_1314.parquet`");
   }
 
   @Ignore("Binary file too large for version control")
   @Test
   public void testDrill_1314_all_columns() throws Exception {
-    compareParquetReadersHyperVector("*", "dfs.`/tmp/drill_1314.parquet`");
+    compareParquetReadersHyperVector("*", "dfs.`tmp/drill_1314.parquet`");
     compareParquetReadersColumnar(
         "l_orderkey,l_partkey,l_suppkey,l_linenumber, l_quantity, l_extendedprice,l_discount,l_tax",
-        "dfs.`/tmp/drill_1314.parquet`");
+        "dfs.`tmp/drill_1314.parquet`");
   }
 
   @Ignore("Test file not available")
   @Test
   public void testParquetRead_checkShortNullLists() throws Exception {
-    compareParquetReadersColumnar("*", "dfs.`/tmp/short_null_lists.parquet`");
+    compareParquetReadersColumnar("*", "dfs.`tmp/short_null_lists.parquet`");
   }
 
   @Ignore("Test file not available")
   @Test
   public void testParquetRead_checkStartWithNull() throws Exception {
-    compareParquetReadersColumnar("*", "dfs.`/tmp/start_with_null.parquet`");
+    compareParquetReadersColumnar("*", "dfs.`tmp/start_with_null.parquet`");
   }
 
   @Ignore("Binary file too large for version control")
   @Test
   public void testParquetReadWebReturns() throws Exception {
-    compareParquetReadersColumnar("wr_returning_customer_sk", "dfs.`/tmp/web_returns`");
+    compareParquetReadersColumnar("wr_returning_customer_sk", "dfs.`tmp/web_returns`");
   }
 
   @Test
@@ -578,18 +579,16 @@ public class TestParquetWriter extends BaseTestQuery {
     String outputTable = "decimal_test";
 
     try {
-      String ctas = String.format("use dfs_test.tmp; " +
-          "create table %s as select " +
-          "cast('1.2' as decimal(38, 2)) col1, cast('1.2' as decimal(28, 2)) col2 " +
-          "from cp.`employee.json` limit 1", outputTable);
-
-      test(ctas);
+      test("use dfs.tmp; " +
+        "create table %s as select " +
+        "cast('1.2' as decimal(38, 2)) col1, cast('1.2' as decimal(28, 2)) col2 " +
+        "from cp.`employee.json` limit 1", outputTable);
 
       BigDecimal result = new BigDecimal("1.20");
 
       testBuilder()
           .unOrdered()
-          .sqlQuery(String.format("select col1, col2 from %s ", outputTable))
+          .sqlQuery("select col1, col2 from %s ", outputTable)
           .baselineColumns("col1", "col2")
           .baselineValues(result, result)
           .go();
@@ -604,17 +603,15 @@ public class TestParquetWriter extends BaseTestQuery {
     final String newTblName = "testTableOutputSchema";
 
     try {
-      final String ctas = String.format("CREATE TABLE dfs_test.tmp.%s(id, name, bday) AS SELECT " +
-          "cast(`employee_id` as integer), " +
-          "cast(`full_name` as varchar(100)), " +
-          "cast(`birth_date` as date) " +
-          "FROM cp.`employee.json` ORDER BY `employee_id` LIMIT 1", newTblName);
-
-      test(ctas);
+      test("CREATE TABLE dfs.tmp.%s(id, name, bday) AS SELECT " +
+        "cast(`employee_id` as integer), " +
+        "cast(`full_name` as varchar(100)), " +
+        "cast(`birth_date` as date) " +
+        "FROM cp.`employee.json` ORDER BY `employee_id` LIMIT 1", newTblName);
 
       testBuilder()
           .unOrdered()
-          .sqlQuery(String.format("SELECT * FROM dfs_test.tmp.`%s`", newTblName))
+          .sqlQuery("SELECT * FROM dfs.tmp.`%s`", newTblName)
           .baselineColumns("id", "name", "bday")
           .baselineValues(1, "Sheri Nowmer", new DateTime(Date.valueOf("1961-08-26").getTime()))
           .go();
@@ -629,15 +626,15 @@ public class TestParquetWriter extends BaseTestQuery {
  */
   @Test
   public void testCTASWithIntervalTypes() throws Exception {
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
 
     String tableName = "drill_1980_t1";
     // test required interval day type
-    test(String.format("create table %s as " +
+    test("create table %s as " +
         "select " +
         "interval '10 20:30:40.123' day to second col1, " +
         "interval '-1000000000 20:12:23.999' day(10) to second col2 " +
-        "from cp.`employee.json` limit 2", tableName));
+        "from cp.`employee.json` limit 2", tableName);
 
     Period row1Col1 = new Period(0, 0, 0, 10, 0, 0, 0, 73840123);
     Period row1Col2 = new Period(0, 0, 0, -1000000000, 0, 0, 0, -72743999);
@@ -646,11 +643,11 @@ public class TestParquetWriter extends BaseTestQuery {
     tableName = "drill_1980_2";
 
     // test required interval year type
-    test(String.format("create table %s as " +
+    test("create table %s as " +
         "select " +
         "interval '10-2' year to month col1, " +
         "interval '-100-8' year(3) to month col2 " +
-        "from cp.`employee.json` limit 2", tableName));
+        "from cp.`employee.json` limit 2", tableName);
 
     row1Col1 = new Period(0, 122, 0, 0, 0, 0, 0, 0);
     row1Col2 = new Period(0, -1208, 0, 0, 0, 0, 0, 0);
@@ -658,11 +655,11 @@ public class TestParquetWriter extends BaseTestQuery {
     testParquetReaderHelper(tableName, row1Col1, row1Col2, row1Col1, row1Col2);
     // test nullable interval year type
     tableName = "drill_1980_t3";
-    test(String.format("create table %s as " +
+    test("create table %s as " +
         "select " +
         "cast (intervalyear_col as interval year) col1," +
         "cast(intervalyear_col as interval year) + interval '2' year col2 " +
-        "from cp.`parquet/alltypes.json` where tinyint_col = 1 or tinyint_col = 2", tableName));
+        "from cp.`parquet/alltypes.json` where tinyint_col = 1 or tinyint_col = 2", tableName);
 
     row1Col1 = new Period(0, 12, 0, 0, 0, 0, 0, 0);
     row1Col2 = new Period(0, 36, 0, 0, 0, 0, 0, 0);
@@ -673,11 +670,11 @@ public class TestParquetWriter extends BaseTestQuery {
 
     // test nullable interval day type
     tableName = "drill_1980_t4";
-    test(String.format("create table %s as " +
+    test("create table %s as " +
         "select " +
         "cast(intervalday_col as interval day) col1, " +
         "cast(intervalday_col as interval day) + interval '1' day col2 " +
-        "from cp.`parquet/alltypes.json` where tinyint_col = 1 or tinyint_col = 2", tableName));
+        "from cp.`parquet/alltypes.json` where tinyint_col = 1 or tinyint_col = 2", tableName);
 
     row1Col1 = new Period(0, 0, 0, 1, 0, 0, 0, 0);
     row1Col2 = new Period(0, 0, 0, 2, 0, 0, 0, 0);
@@ -716,7 +713,7 @@ public class TestParquetWriter extends BaseTestQuery {
 
   private static void deleteTableIfExists(String tableName) {
     try {
-      Path path = new Path(getDfsTestTmpSchemaLocation(), tableName);
+      Path path = new Path(dirTestWatcher.getDfsTestTmpDir().getAbsolutePath(), tableName);
       if (fs.exists(path)) {
         fs.delete(path, true);
       }
@@ -728,24 +725,22 @@ public class TestParquetWriter extends BaseTestQuery {
   public void runTestAndValidate(String selection, String validationSelection, String inputTable, String outputFile) throws Exception {
     try {
       deleteTableIfExists(outputFile);
-      test("use dfs_test.tmp");
-      String query = String.format("SELECT %s FROM %s", selection, inputTable);
-      String create = "CREATE TABLE " + outputFile + " AS " + query;
-      String validateQuery = String.format("SELECT %s FROM " + outputFile, validationSelection);
-      test(create);
 
+      final String query = String.format("SELECT %s FROM %s", selection, inputTable);
+
+      test("use dfs.tmp");
+      test("CREATE TABLE %s AS %s", outputFile, query);
       testBuilder()
           .unOrdered()
-          // Validate query is the query on the output file (the one to validate)
-          .sqlQuery(validateQuery)
-          // The basline query is the query on the input file (the expected values)
-          .sqlBaselineQuery(query)
+          .sqlQuery(query)
+          .sqlBaselineQuery("SELECT %s FROM %s", validationSelection, outputFile)
           .go();
 
       Configuration hadoopConf = new Configuration();
       hadoopConf.set(FileSystem.FS_DEFAULT_NAME_KEY, FileSystem.DEFAULT_FS);
-      Path output = new Path(getDfsTestTmpSchemaLocation(), outputFile);
+
       @SuppressWarnings("resource")
+      Path output = new Path(dirTestWatcher.getDfsTestTmpDir().getAbsolutePath(), outputFile);
       FileSystem fs = output.getFileSystem(hadoopConf);
       for (FileStatus file : fs.listStatus(output)) {
         ParquetMetadata footer = ParquetFileReader.readFooter(hadoopConf, file, SKIP_ROW_GROUPS);
@@ -787,11 +782,9 @@ public class TestParquetWriter extends BaseTestQuery {
   @Test
   @Ignore("relies on particular time zone, works for UTC")
   public void testImpalaParquetBinaryAsTimeStamp_DictChange() throws Exception {
-    final String WORKING_PATH = TestTools.getWorkingPath();
-    final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources";
     try {
       testBuilder()
-          .sqlQuery("select int96_ts from dfs_test.`%s/parquet/int96_dict_change` order by int96_ts", TEST_RES_PATH)
+          .sqlQuery("select int96_ts from dfs.`parquet/int96_dict_change` order by int96_ts")
           .optionSettingQueriesForTestQuery(
               "alter session set `%s` = true", ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP)
           .ordered()
@@ -851,12 +844,10 @@ public class TestParquetWriter extends BaseTestQuery {
   @Test
   @Ignore("relies on particular time zone")
   public void testHiveParquetTimestampAsInt96_basic() throws Exception {
-    final String q = "SELECT cast(convert_from(timestamp_field, 'TIMESTAMP_IMPALA') as varchar(19))  as timestamp_field "
-            + "from cp.`parquet/part1/hive_all_types.parquet` ";
-
     testBuilder()
             .unOrdered()
-            .sqlQuery(q)
+            .sqlQuery("SELECT cast(convert_from(timestamp_field, 'TIMESTAMP_IMPALA') as varchar(19))  as timestamp_field "
+              + "from cp.`parquet/part1/hive_all_types.parquet` ")
             .baselineColumns("timestamp_field")
             .baselineValues("2013-07-05 17:01:00")
             .baselineValues((Object)null)
@@ -999,6 +990,5 @@ public class TestParquetWriter extends BaseTestQuery {
       resetSessionOption(ExecConstants.PARQUET_READER_INT96_AS_TIMESTAMP);
     }
   }
-
 }
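
The hunks above drop JUnit's TemporaryFolder rule and the dfs_test.tmp workspace in favor of the
BaseDirTestWatcher that BaseTestQuery now exposes as dirTestWatcher. A minimal sketch of the new
pattern, using only calls that appear in this diff (the class and table names are illustrative):

    import java.nio.file.Paths;

    import org.apache.drill.test.BaseTestQuery;
    import org.junit.BeforeClass;
    import org.junit.Test;

    public class ExampleTempDirUsage extends BaseTestQuery {
      @BeforeClass
      public static void setupTestFiles() {
        // Stage a classpath resource directory under the watcher-managed dfs root,
        // so queries can address it as dfs.`parquet/int96_dict_change`.
        dirTestWatcher.copyResourceToRoot(Paths.get("parquet", "int96_dict_change"));
      }

      @Test
      public void exampleCtas() throws Exception {
        // dfs.tmp replaces the old dfs_test.tmp workspace throughout this commit.
        test("use dfs.tmp");
        test("create table example_table as select * from cp.`parquet/alltypes.json`");
      }
    }

The watcher creates and cleans its directories for each test class, which reduces the
deleteTableIfExists bookkeeping these tests previously carried.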
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriterEmptyFiles.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriterEmptyFiles.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriterEmptyFiles.java
index 2170f41..bc72234 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriterEmptyFiles.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestParquetWriterEmptyFiles.java
@@ -17,41 +17,33 @@
  */
 package org.apache.drill.exec.physical.impl.writer;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.commons.io.FileUtils;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.ParquetTest;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.exec.ExecConstants;
-import org.apache.drill.test.OperatorFixture;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.io.File;
+
 @Category({ParquetTest.class, UnlikelyTest.class})
 public class TestParquetWriterEmptyFiles extends BaseTestQuery {
 
-  private static FileSystem fs;
-
   @BeforeClass
   public static void initFs() throws Exception {
-    fs = getLocalFileSystem();
     updateTestCluster(3, null);
   }
 
   @Test // see DRILL-2408
   public void testWriteEmptyFile() throws Exception {
-    final String outputFile = "testparquetwriteremptyfiles_testwriteemptyfile";
-
-    try {
-      test("CREATE TABLE dfs_test.tmp.%s AS SELECT * FROM cp.`employee.json` WHERE 1=0", outputFile);
+    final String outputFileName = "testparquetwriteremptyfiles_testwriteemptyfile";
+    final File outputFile = FileUtils.getFile(dirTestWatcher.getDfsTestTmpDir(), outputFileName);
 
-      final Path path = new Path(getDfsTestTmpSchemaLocation(), outputFile);
-      Assert.assertFalse(fs.exists(path));
-    } finally {
-      deleteTableIfExists(outputFile);
-    }
+    test("CREATE TABLE dfs.tmp.%s AS SELECT * FROM cp.`employee.json` WHERE 1=0", outputFileName);
+    Assert.assertFalse(outputFile.exists());
   }
 
   @Test
@@ -62,24 +54,23 @@ public class TestParquetWriterEmptyFiles extends BaseTestQuery {
 
     try {
       final String query = "SELECT position_id FROM cp.`employee.json` WHERE position_id IN (15, 16) GROUP BY position_id";
-      test("CREATE TABLE dfs_test.tmp.%s AS %s", outputFile, query);
+
+      test("CREATE TABLE dfs.tmp.%s AS %s", outputFile, query);
 
       // this query will fail if an "empty" file was created
       testBuilder()
         .unOrdered()
-        .sqlQuery("SELECT * FROM dfs_test.tmp.%s", outputFile)
+        .sqlQuery("SELECT * FROM dfs.tmp.%s", outputFile)
         .sqlBaselineQuery(query)
         .go();
     } finally {
       runSQL("alter session set `planner.slice_target` = " + ExecConstants.SLICE_TARGET_DEFAULT);
-      deleteTableIfExists(outputFile);
     }
   }
 
   @Test // see DRILL-2408
   public void testWriteEmptyFileAfterFlush() throws Exception {
     final String outputFile = "testparquetwriteremptyfiles_test_write_empty_file_after_flush";
-    deleteTableIfExists(outputFile);
 
     try {
       // this specific value will force a flush just after the final row is written
@@ -87,32 +78,17 @@ public class TestParquetWriterEmptyFiles extends BaseTestQuery {
       test("ALTER SESSION SET `store.parquet.block-size` = 19926");
 
       final String query = "SELECT * FROM cp.`employee.json` LIMIT 100";
-      test("CREATE TABLE dfs_test.tmp.%s AS %s", outputFile, query);
+      test("CREATE TABLE dfs.tmp.%s AS %s", outputFile, query);
 
       // this query will fail if an "empty" file was created
       testBuilder()
         .unOrdered()
-        .sqlQuery("SELECT * FROM dfs_test.tmp.%s", outputFile)
+        .sqlQuery("SELECT * FROM dfs.tmp.%s", outputFile)
         .sqlBaselineQuery(query)
         .go();
     } finally {
       // restore the session option
       resetSessionOption(ExecConstants.PARQUET_BLOCK_SIZE);
-      deleteTableIfExists(outputFile);
-    }
-  }
-
-  private static boolean deleteTableIfExists(String tableName) {
-    try {
-      Path path = new Path(getDfsTestTmpSchemaLocation(), tableName);
-      if (fs.exists(path)) {
-        return fs.delete(path, true);
-      }
-    } catch (Exception e) {
-      // ignore exceptions.
-      return false;
     }
-
-    return true;
   }
 }
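
With the watcher in place, the empty-file checks above assert against plain java.io.File paths
instead of going through a Hadoop FileSystem. A hedged sketch of that pattern, reusing only calls
shown in this diff (the table name is illustrative):

    import java.io.File;

    import org.apache.commons.io.FileUtils;
    import org.apache.drill.test.BaseTestQuery;
    import org.junit.Assert;
    import org.junit.Test;

    public class ExampleEmptyFileCheck extends BaseTestQuery {
      @Test // see DRILL-2408
      public void emptyCtasWritesNothing() throws Exception {
        // Resolve where dfs.tmp materializes on local disk.
        File table = FileUtils.getFile(dirTestWatcher.getDfsTestTmpDir(), "example_empty");
        test("CREATE TABLE dfs.tmp.example_empty AS SELECT * FROM cp.`employee.json` WHERE 1=0");
        // The predicate selects no rows, so the writer should leave no directory behind.
        Assert.assertFalse(table.exists());
      }
    }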

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
index 9f93eed..4dc2b96 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/writer/TestWriter.java
@@ -20,45 +20,39 @@ package org.apache.drill.exec.physical.impl.writer;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
+import java.io.File;
+import java.nio.file.Paths;
 import java.util.List;
 
-import org.apache.drill.BaseTestQuery;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.test.BaseTestQuery;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.apache.drill.exec.record.RecordBatchLoader;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.exec.vector.BigIntVector;
 import org.apache.drill.exec.vector.VarCharVector;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.junit.BeforeClass;
 import org.junit.Test;
 
 import com.google.common.base.Charsets;
 import com.google.common.io.Files;
 
 public class TestWriter extends BaseTestQuery {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestWriter.class);
+  private static final String ROOT_DIR_REPLACEMENT = "%ROOT_DIR%";
+  private static final String TMP_DIR_REPLACEMENT = "%TMP_DIR%";
+  private static final String TEST_DIR_REPLACEMENT = "%TEST_DIR%";
 
-  static FileSystem fs;
-  static String ALTER_SESSION = String.format("ALTER SESSION SET `%s` = 'csv'", ExecConstants.OUTPUT_FORMAT_OPTION);
-
-  @BeforeClass
-  public static void initFs() throws Exception {
-    fs = getLocalFileSystem();
-  }
+  private static final String ALTER_SESSION = String.format("ALTER SESSION SET `%s` = 'csv'", ExecConstants.OUTPUT_FORMAT_OPTION);
 
   @Test
   public void simpleCsv() throws Exception {
-    // before executing the test deleting the existing CSV files in /tmp/csvtest
-    Path path = new Path("/tmp/csvtest");
-    if (fs.exists(path)) {
-      fs.delete(path, true);
-    }
+    File testDir = dirTestWatcher.makeRootSubDir(Paths.get("csvtest"));
 
-    String plan = Files.toString(FileUtils.getResourceAsFile("/writer/simple_csv_writer.json"), Charsets.UTF_8);
+    String plan = Files.toString(DrillFileUtils.getResourceAsFile("/writer/simple_csv_writer.json"), Charsets.UTF_8);
+    plan = plan
+      .replace(ROOT_DIR_REPLACEMENT, dirTestWatcher.getRootDir().getAbsolutePath())
+      .replace(TMP_DIR_REPLACEMENT, dirTestWatcher.getTmpDir().getAbsolutePath())
+      .replace(TEST_DIR_REPLACEMENT, testDir.getAbsolutePath());
 
     List<QueryDataBatch> results = testPhysicalWithResults(plan);
 
@@ -77,49 +71,50 @@ public class TestWriter extends BaseTestQuery {
     assertEquals(132000, recordWrittenV.getAccessor().get(0));
 
     // now verify csv files are written to disk
-    assertTrue(fs.exists(path));
+    assertTrue(testDir.exists());
 
     // expect two files
-    FileStatus[] fileStatuses = fs.globStatus(new Path(path.toString(), "*.csv"));
-    assertTrue(2 == fileStatuses.length);
+    int count = org.apache.commons.io.FileUtils.listFiles(testDir, new String[]{"csv"}, false).size();
+    assertEquals(2, count);
 
     for (QueryDataBatch b : results) {
       b.release();
     }
+
     batchLoader.clear();
   }
 
   @Test
   public void simpleCTAS() throws Exception {
     final String tableName = "simplectas";
-    runSQL("Use dfs_test.tmp");
+    runSQL("Use dfs.tmp");
     runSQL(ALTER_SESSION);
 
     final String testQuery = String.format("CREATE TABLE %s AS SELECT * FROM cp.`employee.json`", tableName);
 
-    testCTASQueryHelper(tableName, testQuery, 1155);
+    testCTASQueryHelper(testQuery, 1155);
   }
 
   @Test
   public void complex1CTAS() throws Exception {
     final String tableName = "complex1ctas";
-    runSQL("Use dfs_test.tmp");
+    runSQL("Use dfs.tmp");
     runSQL(ALTER_SESSION);
     final String testQuery = String.format("CREATE TABLE %s AS SELECT first_name, last_name, " +
         "position_id FROM cp.`employee.json`", tableName);
 
-    testCTASQueryHelper(tableName, testQuery, 1155);
+    testCTASQueryHelper(testQuery, 1155);
   }
 
   @Test
   public void complex2CTAS() throws Exception {
-    final String tableName = "complex1ctas";
-    runSQL("Use dfs_test.tmp");
+    final String tableName = "complex2ctas";
+    runSQL("Use dfs.tmp");
     runSQL(ALTER_SESSION);
     final String testQuery = String.format("CREATE TABLE %s AS SELECT CAST(`birth_date` as Timestamp) FROM " +
         "cp.`employee.json` GROUP BY birth_date", tableName);
 
-    testCTASQueryHelper(tableName, testQuery, 52);
+    testCTASQueryHelper(testQuery, 52);
   }
 
   @Test
@@ -127,21 +122,21 @@ public class TestWriter extends BaseTestQuery {
     final String tableName = "/test/simplectas2";
     runSQL(ALTER_SESSION);
     final String testQuery =
-        String.format("CREATE TABLE dfs_test.tmp.`%s` AS SELECT * FROM cp.`employee.json`",tableName);
+        String.format("CREATE TABLE dfs.tmp.`%s` AS SELECT * FROM cp.`employee.json`",tableName);
 
-    testCTASQueryHelper(tableName, testQuery, 1155);
+    testCTASQueryHelper(testQuery, 1155);
   }
 
   @Test
   public void simpleParquetDecimal() throws Exception {
     try {
       final String tableName = "simpleparquetdecimal";
-      final String testQuery = String.format("CREATE TABLE dfs_test.tmp.`%s` AS SELECT cast(salary as " +
+      final String testQuery = String.format("CREATE TABLE dfs.tmp.`%s` AS SELECT cast(salary as " +
           "decimal(30,2)) * -1 as salary FROM cp.`employee.json`", tableName);
 
       // enable decimal
       test(String.format("alter session set `%s` = true", PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY));
-      testCTASQueryHelper(tableName, testQuery, 1155);
+      testCTASQueryHelper(testQuery, 1155);
 
       // disable decimal
     } finally {
@@ -149,42 +144,31 @@ public class TestWriter extends BaseTestQuery {
     }
   }
 
-  private void testCTASQueryHelper(String tableName, String testQuery, int expectedOutputCount) throws Exception {
-    try {
-      List<QueryDataBatch> results = testSqlWithResults(testQuery);
-
-      RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
+  private void testCTASQueryHelper(String testQuery, int expectedOutputCount) throws Exception {
+    List<QueryDataBatch> results = testSqlWithResults(testQuery);
 
-      int recordsWritten = 0;
-      for (QueryDataBatch batch : results) {
-        batchLoader.load(batch.getHeader().getDef(), batch.getData());
+    RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
 
-        if (batchLoader.getRecordCount() <= 0) {
-          continue;
-        }
+    int recordsWritten = 0;
+    for (QueryDataBatch batch : results) {
+      batchLoader.load(batch.getHeader().getDef(), batch.getData());
 
-        BigIntVector recordWrittenV = (BigIntVector) batchLoader.getValueAccessorById(BigIntVector.class, 1).getValueVector();
+      if (batchLoader.getRecordCount() <= 0) {
+        continue;
+      }
 
-        for (int i = 0; i < batchLoader.getRecordCount(); i++) {
-          recordsWritten += recordWrittenV.getAccessor().get(i);
-        }
+      BigIntVector recordWrittenV = (BigIntVector) batchLoader
+        .getValueAccessorById(BigIntVector.class, 1)
+        .getValueVector();
 
-        batchLoader.clear();
-        batch.release();
+      for (int i = 0; i < batchLoader.getRecordCount(); i++) {
+        recordsWritten += recordWrittenV.getAccessor().get(i);
       }
 
-      assertEquals(expectedOutputCount, recordsWritten);
-    } finally {
-      try {
-        Path path = new Path(getDfsTestTmpSchemaLocation(), tableName);
-        if (fs.exists(path)) {
-          fs.delete(path, true);
-        }
-      } catch (Exception e) {
-        // ignore exceptions.
-        logger.warn("Failed to delete the table [{}, {}] created as part of the test",
-            getDfsTestTmpSchemaLocation(), tableName);
-      }
+      batchLoader.clear();
+      batch.release();
     }
+
+    assertEquals(expectedOutputCount, recordsWritten);
   }
 }
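
TestWriter now binds physical-plan resources to the watcher's directories through placeholder
tokens rather than hard-coded /tmp paths. A sketch of that loading step, assuming the plan
resource carries the %ROOT_DIR%/%TMP_DIR%/%TEST_DIR% tokens as this commit's changes imply:

    import java.io.File;
    import java.nio.file.Paths;

    import com.google.common.base.Charsets;
    import com.google.common.io.Files;
    import org.apache.drill.common.util.DrillFileUtils;
    import org.apache.drill.test.BaseTestQuery;

    public class ExamplePlanBinding extends BaseTestQuery {
      // Reads a plan resource and binds its placeholder tokens to watcher-managed
      // directories; the bound plan can then be run with testPhysicalWithResults().
      private String loadBoundPlan() throws Exception {
        File testDir = dirTestWatcher.makeRootSubDir(Paths.get("csvtest"));
        String plan = Files.toString(
            DrillFileUtils.getResourceAsFile("/writer/simple_csv_writer.json"), Charsets.UTF_8);
        return plan
          .replace("%ROOT_DIR%", dirTestWatcher.getRootDir().getAbsolutePath())
          .replace("%TMP_DIR%", dirTestWatcher.getTmpDir().getAbsolutePath())
          .replace("%TEST_DIR%", testDir.getAbsolutePath());
      }
    }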

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java
index 66b1f0e..c52f1a9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestExternalSort.java
@@ -17,18 +17,24 @@
  */
 package org.apache.drill.exec.physical.impl.xsort;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.common.types.TypeProtos;
+import org.apache.drill.common.types.Types;
+import org.apache.drill.exec.record.BatchSchema;
+import org.apache.drill.test.BaseTestQuery;
+import org.apache.drill.test.TestBuilder;
 import org.apache.drill.categories.OperatorTest;
-import org.apache.drill.TestBuilder;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.test.rowSet.RowSet;
+import org.apache.drill.test.rowSet.RowSetBuilder;
+import org.apache.drill.test.rowSet.SchemaBuilder;
+import org.apache.drill.test.rowSet.file.JsonFileBuilder;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
-import java.io.BufferedOutputStream;
 import java.io.File;
-import java.io.FileOutputStream;
+import java.nio.file.Paths;
 
 @Category({SlowTest.class, OperatorTest.class})
 public class TestExternalSort extends BaseTestQuery {
@@ -45,29 +51,48 @@ public class TestExternalSort extends BaseTestQuery {
 
   private void testNumericTypes(boolean testLegacy) throws Exception {
     final int record_count = 10000;
-    String dfs_temp = getDfsTestTmpSchemaLocation();
-    System.out.println(dfs_temp);
-    File table_dir = new File(dfs_temp, "numericTypes");
-    table_dir.mkdir();
-    try(BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "a.json")))) {
-      String format = "{ a : %d }%n";
+    final String tableDirName = "numericTypes";
+
+    {
+      final BatchSchema schema = new SchemaBuilder()
+        .add("a", Types.required(TypeProtos.MinorType.INT))
+        .build();
+      final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
+
       for (int i = 0; i <= record_count; i += 2) {
-        os.write(String.format(format, i).getBytes());
+        rowSetBuilder.add(i);
       }
+
+      final RowSet rowSet = rowSetBuilder.build();
+      final File tableFile = createTableFile(tableDirName, "a.json");
+      new JsonFileBuilder(rowSet).build(tableFile);
+      rowSet.clear();
     }
-    try(BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "b.json")))) {
-      String format = "{ a : %.2f }%n";
-      for (int i = 1; i <= record_count; i+=2) {
-        os.write(String.format(format, (float) i).getBytes());
+
+    {
+      final BatchSchema schema = new SchemaBuilder()
+        .add("a", Types.required(TypeProtos.MinorType.FLOAT4))
+        .build();
+      final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
+
+      for (int i = 1; i <= record_count; i += 2) {
+        rowSetBuilder.add((float) i);
       }
+
+      final RowSet rowSet = rowSetBuilder.build();
+      final File tableFile = createTableFile(tableDirName, "b.json");
+      new JsonFileBuilder(rowSet)
+        .setCustomFormatter("a", "%.2f")
+        .build(tableFile);
+      rowSet.clear();
     }
-    String query = "select * from dfs_test.tmp.numericTypes order by a desc";
+
     TestBuilder builder = testBuilder()
-            .sqlQuery(query)
-            .optionSettingQueriesForTestQuery(getOptions(testLegacy))
-            .ordered()
-            .baselineColumns("a");
-    for (int i = record_count; i >= 0;) {
+      .sqlQuery("select * from dfs.`%s` order by a desc", tableDirName)
+      .optionSettingQueriesForTestQuery(getOptions(testLegacy))
+      .ordered()
+      .baselineColumns("a");
+    for (int i = record_count; i >= 0; ) {
       builder.baselineValues((long) i--);
       if (i >= 0) {
         builder.baselineValues((double) i--);
@@ -97,25 +122,44 @@ public class TestExternalSort extends BaseTestQuery {
 
   private void testNumericAndStringTypes(boolean testLegacy) throws Exception {
     final int record_count = 10000;
-    String dfs_temp = getDfsTestTmpSchemaLocation();
-    System.out.println(dfs_temp);
-    File table_dir = new File(dfs_temp, "numericAndStringTypes");
-    table_dir.mkdir();
-    try (BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "a.json")))) {
-      String format = "{ a : %d }%n";
+    final String tableDirName = "numericAndStringTypes";
+
+    {
+      final BatchSchema schema = new SchemaBuilder()
+        .add("a", Types.required(TypeProtos.MinorType.INT))
+        .build();
+      final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
+
       for (int i = 0; i <= record_count; i += 2) {
-        os.write(String.format(format, i).getBytes());
+        rowSetBuilder.add(i);
       }
+
+      final RowSet rowSet = rowSetBuilder.build();
+      final File tableFile = createTableFile(tableDirName, "a.json");
+      new JsonFileBuilder(rowSet).build(tableFile);
+      rowSet.clear();
     }
-    try (BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "b.json")))) {
-      String format = "{ a : \"%05d\" }%n";
-      for (int i = 1; i <= record_count; i+=2) {
-        os.write(String.format(format, i).getBytes());
+
+    {
+      final BatchSchema schema = new SchemaBuilder()
+        .add("a", Types.required(TypeProtos.MinorType.INT))
+        .build();
+      final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
+
+      for (int i = 1; i <= record_count; i += 2) {
+        rowSetBuilder.add(i);
       }
+
+      final RowSet rowSet = rowSetBuilder.build();
+      final File tableFile = createTableFile(tableDirName, "b.json");
+      new JsonFileBuilder(rowSet)
+        .setCustomFormatter("a", "\"%05d\"")
+        .build(tableFile);
+      rowSet.clear();
     }
-    String query = "select * from dfs_test.tmp.numericAndStringTypes order by a desc";
+
     TestBuilder builder = testBuilder()
-            .sqlQuery(query)
+            .sqlQuery("select * from dfs.`%s` order by a desc", tableDirName)
             .ordered()
             .optionSettingQueriesForTestQuery(getOptions(testLegacy))
             .baselineColumns("a");
@@ -138,7 +182,6 @@ public class TestExternalSort extends BaseTestQuery {
     testNewColumns(false);
   }
 
-
   @Test
   public void testNewColumnsLegacy() throws Exception {
     testNewColumns(true);
@@ -146,26 +189,45 @@ public class TestExternalSort extends BaseTestQuery {
 
   private void testNewColumns(boolean testLegacy) throws Exception {
     final int record_count = 10000;
-    String dfs_temp = getDfsTestTmpSchemaLocation();
-    System.out.println(dfs_temp);
-    File table_dir = new File(dfs_temp, "newColumns");
-    table_dir.mkdir();
-    try (BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "a.json")))) {
-      String format = "{ a : %d, b : %d }%n";
+    final String tableDirName = "newColumns";
+
+    {
+      final BatchSchema schema = new SchemaBuilder()
+        .add("a", Types.required(TypeProtos.MinorType.INT))
+        .add("b", Types.required(TypeProtos.MinorType.INT))
+        .build();
+      final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
+
       for (int i = 0; i <= record_count; i += 2) {
-        os.write(String.format(format, i, i).getBytes());
+        rowSetBuilder.add(i, i);
       }
+
+      final RowSet rowSet = rowSetBuilder.build();
+      final File tableFile = createTableFile(tableDirName, "a.json");
+      new JsonFileBuilder(rowSet).build(tableFile);
+      rowSet.clear();
     }
-    try (BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "b.json")))) {
-      String format = "{ a : %d, c : %d }%n";
-      for (int i = 1; i <= record_count; i+=2) {
-        os.write(String.format(format, i, i).getBytes());
+
+    {
+      final BatchSchema schema = new SchemaBuilder()
+        .add("a", Types.required(TypeProtos.MinorType.INT))
+        .add("c", Types.required(TypeProtos.MinorType.INT))
+        .build();
+      final RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
+
+      for (int i = 1; i <= record_count; i += 2) {
+        rowSetBuilder.add(i, i);
       }
+
+      final RowSet rowSet = rowSetBuilder.build();
+      final File tableFile = createTableFile(tableDirName, "b.json");
+      new JsonFileBuilder(rowSet).build(tableFile);
+      rowSet.clear();
     }
-    String query = "select a, b, c from dfs_test.tmp.newColumns order by a desc";
-//    Test framework currently doesn't handle changing schema (i.e. new columns) on the client side
+
+    // Test framework currently doesn't handle changing schema (i.e. new columns) on the client side
     TestBuilder builder = testBuilder()
-            .sqlQuery(query)
+            .sqlQuery("select a, b, c from dfs.`%s` order by a desc", tableDirName)
             .ordered()
             .optionSettingQueriesForTestQuery(getOptions(testLegacy))
             .baselineColumns("a", "b", "c");
@@ -176,7 +238,14 @@ public class TestExternalSort extends BaseTestQuery {
       }
     }
     builder.go();
-    String newQuery = "select * from dfs_test.tmp.newColumns order by a desc";
-    test(newQuery);
+    test("select * from dfs.`%s` order by a desc", tableDirName);
+  }
+
+  private File createTableFile(final String tableDirName, final String fileName) {
+    return dirTestWatcher
+      .getRootDir()
+      .toPath()
+      .resolve(Paths.get(tableDirName, fileName))
+      .toFile();
   }
 }
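
The sort tests above replace hand-rolled JSON writing through BufferedOutputStream with the
row-set tooling: SchemaBuilder describes the batch, RowSetBuilder fills it, and JsonFileBuilder
serializes it under the watcher's root. A compact sketch under those assumptions (the row count
and table name are illustrative):

    import java.io.File;
    import java.nio.file.Paths;

    import org.apache.drill.common.types.TypeProtos;
    import org.apache.drill.common.types.Types;
    import org.apache.drill.exec.record.BatchSchema;
    import org.apache.drill.test.BaseTestQuery;
    import org.apache.drill.test.rowSet.RowSet;
    import org.apache.drill.test.rowSet.RowSetBuilder;
    import org.apache.drill.test.rowSet.SchemaBuilder;
    import org.apache.drill.test.rowSet.file.JsonFileBuilder;

    public class ExampleJsonData extends BaseTestQuery {
      public void writeRows() throws Exception {
        BatchSchema schema = new SchemaBuilder()
          .add("a", Types.required(TypeProtos.MinorType.INT))
          .build();
        RowSetBuilder rowSetBuilder = new RowSetBuilder(allocator, schema);
        for (int i = 0; i < 10; i++) {
          rowSetBuilder.add(i);  // one { "a": i } record per row
        }
        RowSet rowSet = rowSetBuilder.build();
        File tableFile = dirTestWatcher.getRootDir().toPath()
          .resolve(Paths.get("exampleTable", "a.json")).toFile();
        new JsonFileBuilder(rowSet).build(tableFile);
        rowSet.clear();  // release the value vectors backing the row set
      }
    }

Queries can then address the data as dfs.`exampleTable`, exactly as testNumericTypes does above,
and setCustomFormatter() controls how a column is rendered (for example "%.2f" for floats).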

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java
index aa0d4ee..21de6c5 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSimpleExternalSort.java
@@ -24,13 +24,14 @@ import java.util.List;
 
 import org.apache.drill.common.expression.ExpressionPosition;
 import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.util.TestTools;
+import org.apache.drill.test.TestTools;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.memory.BufferAllocator;
 import org.apache.drill.exec.record.RecordBatchLoader;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.exec.vector.BigIntVector;
+import org.apache.drill.test.BaseDirTestWatcher;
 import org.apache.drill.test.ClientFixture;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.DrillTest;
@@ -43,9 +44,11 @@ import org.junit.rules.TestRule;
 
 @Category(SlowTest.class)
 public class TestSimpleExternalSort extends DrillTest {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestSimpleExternalSort.class);
+  @Rule
+  public final TestRule TIMEOUT = TestTools.getTimeoutRule(160_000);
 
-  @Rule public final TestRule TIMEOUT = TestTools.getTimeoutRule(160_000);
+  @Rule
+  public final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();
 
   @Test
   public void mergeSortWithSv2Managed() throws Exception {
@@ -69,9 +72,8 @@ public class TestSimpleExternalSort extends DrillTest {
    */
 
   private void mergeSortWithSv2(boolean testLegacy) throws Exception {
-    ClusterFixtureBuilder builder = ClusterFixture.builder()
-        .configProperty(ExecConstants.EXTERNAL_SORT_DISABLE_MANAGED, false)
-         ;
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+        .configProperty(ExecConstants.EXTERNAL_SORT_DISABLE_MANAGED, false);
     try (ClusterFixture cluster = builder.build();
          ClientFixture client = cluster.clientFixture()) {
       chooseImpl(client, testLegacy);
@@ -96,9 +98,8 @@ public class TestSimpleExternalSort extends DrillTest {
   }
 
   private void sortOneKeyDescendingMergeSort(boolean testLegacy) throws Throwable {
-    ClusterFixtureBuilder builder = ClusterFixture.builder()
-        .configProperty(ExecConstants.EXTERNAL_SORT_DISABLE_MANAGED, false)
-         ;
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+        .configProperty(ExecConstants.EXTERNAL_SORT_DISABLE_MANAGED, false);
     try (ClusterFixture cluster = builder.build();
          ClientFixture client = cluster.clientFixture()) {
       chooseImpl(client, testLegacy);
@@ -147,12 +148,11 @@ public class TestSimpleExternalSort extends DrillTest {
   }
 
   private void sortOneKeyDescendingExternalSort(boolean testLegacy) throws Throwable {
-    ClusterFixtureBuilder builder = ClusterFixture.builder()
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
         .configProperty(ExecConstants.EXTERNAL_SORT_SPILL_THRESHOLD, 4)
         .configProperty(ExecConstants.EXTERNAL_SORT_SPILL_GROUP_SIZE, 4)
         .configProperty(ExecConstants.EXTERNAL_SORT_BATCH_LIMIT, 4)
-        .configProperty(ExecConstants.EXTERNAL_SORT_DISABLE_MANAGED, false)
-        ;
+        .configProperty(ExecConstants.EXTERNAL_SORT_DISABLE_MANAGED, false);
     try (ClusterFixture cluster = builder.build();
          ClientFixture client = cluster.clientFixture()) {
       chooseImpl(client,testLegacy);
@@ -173,14 +173,13 @@ public class TestSimpleExternalSort extends DrillTest {
   }
 
   private void outOfMemoryExternalSort(boolean testLegacy) throws Throwable{
-    ClusterFixtureBuilder builder = ClusterFixture.builder()
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
         // Probably do nothing in modern Drill
         .configProperty("drill.memory.fragment.max", 50_000_000)
         .configProperty("drill.memory.fragment.initial", 2_000_000)
         .configProperty("drill.memory.operator.max", 30_000_000)
         .configProperty("drill.memory.operator.initial", 2_000_000)
-        .configProperty(ExecConstants.EXTERNAL_SORT_DISABLE_MANAGED, testLegacy)
-        ;
+        .configProperty(ExecConstants.EXTERNAL_SORT_DISABLE_MANAGED, testLegacy);
     try (ClusterFixture cluster = builder.build();
          ClientFixture client = cluster.clientFixture()) {
       chooseImpl(client,testLegacy);
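
TestSimpleExternalSort wires its per-test BaseDirTestWatcher into the cluster fixture so that dfs
workspaces and spill directories land in managed temp folders. A minimal sketch of that wiring,
assuming ClientFixture's queryBuilder() API (the query itself is illustrative):

    import org.apache.drill.test.BaseDirTestWatcher;
    import org.apache.drill.test.ClientFixture;
    import org.apache.drill.test.ClusterFixture;
    import org.apache.drill.test.ClusterFixtureBuilder;
    import org.apache.drill.test.DrillTest;
    import org.junit.Rule;
    import org.junit.Test;

    public class ExampleClusterSort extends DrillTest {
      @Rule
      public final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();

      @Test
      public void exampleQuery() throws Exception {
        // Passing the watcher to the builder is the key change in this file.
        ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher);
        try (ClusterFixture cluster = builder.build();
             ClientFixture client = cluster.clientFixture()) {
          client.queryBuilder().sql("SELECT * FROM cp.`employee.json` LIMIT 10").run();
        }
      }
    }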

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSortSpillWithException.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSortSpillWithException.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSortSpillWithException.java
index 2694f64..cd3f5fd 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSortSpillWithException.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/xsort/TestSortSpillWithException.java
@@ -22,18 +22,20 @@ import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
+import java.nio.file.Paths;
 
 import org.apache.drill.categories.OperatorTest;
 import org.apache.drill.common.exceptions.UserRemoteException;
-import org.apache.drill.common.util.TestTools;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType;
 import org.apache.drill.exec.testing.Controls;
 import org.apache.drill.exec.testing.ControlsInjectionUtil;
+import org.apache.drill.test.BaseDirTestWatcher;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterTest;
 import org.apache.drill.test.ClusterFixtureBuilder;
 import org.junit.BeforeClass;
+import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
@@ -47,19 +49,21 @@ import org.junit.experimental.categories.Category;
  */
 @Category(OperatorTest.class)
 public class TestSortSpillWithException extends ClusterTest {
-  private static final String TEST_RES_PATH = TestTools.getWorkingPath() + "/src/test/resources";
+  @ClassRule
+  public static final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();
 
   @BeforeClass
   public static void setup() throws Exception {
-    ClusterFixtureBuilder builder = ClusterFixture.builder()
+    dirTestWatcher.copyResourceToRoot(Paths.get("xsort"));
+
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
         .configProperty(ExecConstants.EXTERNAL_SORT_SPILL_THRESHOLD, 1) // Unmanaged
         .configProperty(ExecConstants.EXTERNAL_SORT_SPILL_GROUP_SIZE, 1) // Unmanaged
         .sessionOption(ExecConstants.MAX_QUERY_MEMORY_PER_NODE_KEY, 60 * 1024 * 1024) // Spill early
         // Prevent the percent-based memory rule from second-guessing the above.
         .sessionOption(ExecConstants.PERCENT_MEMORY_PER_QUERY_KEY, 0.0)
         .configProperty(ExecConstants.EXTERNAL_SORT_DISABLE_MANAGED, false)
-        .maxParallelization(1)
-        ;
+        .maxParallelization(1);
     startCluster(builder);
   }
 
@@ -77,7 +81,7 @@ public class TestSortSpillWithException extends ClusterTest {
     ControlsInjectionUtil.setControls(cluster.client(), controls);
     // run a simple order by query
     try {
-      test("select employee_id from dfs_test.`%s/xsort/2batches` order by employee_id", TEST_RES_PATH);
+      test("select employee_id from dfs.`xsort/2batches` order by employee_id");
       fail("Query should have failed!");
     } catch (UserRemoteException e) {
       assertEquals(ErrorType.RESOURCE, e.getErrorType());
@@ -101,7 +105,6 @@ public class TestSortSpillWithException extends ClusterTest {
     // run a simple order by query
     try {
       test("SELECT id_i, name_s250 FROM `mock`.`employee_500K` ORDER BY id_i");
-//      test("select employee_id from dfs_test.`%s/xsort/2batches` order by employee_id", TEST_RES_PATH);
       fail("Query should have failed!");
     } catch (UserRemoteException e) {
       assertEquals(ErrorType.RESOURCE, e.getErrorType());
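
For class-level setup, TestSortSpillWithException promotes the watcher to a @ClassRule, stages
its resources before the cluster starts, and hands the watcher to ClusterFixture. A condensed
sketch of that shape, using only calls visible in this diff:

    import java.nio.file.Paths;

    import org.apache.drill.test.BaseDirTestWatcher;
    import org.apache.drill.test.ClusterFixture;
    import org.apache.drill.test.ClusterFixtureBuilder;
    import org.apache.drill.test.ClusterTest;
    import org.junit.BeforeClass;
    import org.junit.ClassRule;

    public class ExampleSpillSetup extends ClusterTest {
      // Shared by every test method in the class, unlike a per-test @Rule.
      @ClassRule
      public static final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();

      @BeforeClass
      public static void setup() throws Exception {
        // Copy src/test/resources/xsort under the dfs root before startup, so
        // queries can reference dfs.`xsort/...` without an absolute path.
        dirTestWatcher.copyResourceToRoot(Paths.get("xsort"));
        ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
            .maxParallelization(1);
        startCluster(builder);
      }
    }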

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/BasicPhysicalOpUnitTest.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/BasicPhysicalOpUnitTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/BasicPhysicalOpUnitTest.java
index 62c961d..ec85f21 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/BasicPhysicalOpUnitTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/BasicPhysicalOpUnitTest.java
@@ -17,7 +17,7 @@
  */
 package org.apache.drill.exec.physical.unit;
 
-import static org.apache.drill.TestBuilder.mapOf;
+import static org.apache.drill.test.TestBuilder.mapOf;
 
 import java.util.List;
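
The only change here is the relocated static import: TestBuilder now lives in
org.apache.drill.test. As a hedged illustration of what mapOf is for (the file and column names
are hypothetical), it builds the expected value for a map-typed baseline column:

    import static org.apache.drill.test.TestBuilder.mapOf;

    import org.apache.drill.test.BaseTestQuery;
    import org.junit.Test;

    public class ExampleMapBaseline extends BaseTestQuery {
      @Test
      public void mapColumn() throws Exception {
        testBuilder()
            .sqlQuery("select map_col from cp.`example/maps.json`")  // hypothetical file
            .unOrdered()
            .baselineColumns("map_col")
            .baselineValues(mapOf("a", 1L, "b", 2L))  // expected {"a": 1, "b": 2}
            .go();
      }
    }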
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/MiniPlanUnitTestBase.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/MiniPlanUnitTestBase.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/MiniPlanUnitTestBase.java
index ca226e6..754d858 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/MiniPlanUnitTestBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/MiniPlanUnitTestBase.java
@@ -21,7 +21,7 @@ package org.apache.drill.exec.physical.unit;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import mockit.NonStrictExpectations;
-import org.apache.drill.DrillTestWrapper;
+import org.apache.drill.test.DrillTestWrapper;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.physical.base.PhysicalOperator;
 import org.apache.drill.exec.physical.impl.BatchCreator;
@@ -35,7 +35,6 @@ import org.apache.drill.exec.store.dfs.DrillFileSystem;
 import org.apache.drill.exec.store.parquet.ParquetDirectByteBufferAllocator;
 import org.apache.drill.exec.store.parquet.ParquetReaderUtility;
 import org.apache.drill.exec.store.parquet.columnreaders.ParquetRecordReader;
-import org.apache.drill.exec.util.TestUtilities;
 import org.apache.hadoop.fs.Path;
 import org.apache.parquet.hadoop.CodecFactory;
 import org.apache.parquet.hadoop.ParquetFileReader;
@@ -51,7 +50,6 @@ import java.util.TreeMap;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 
-import static org.apache.drill.DrillTestWrapper.addToCombinedVectorResults;
 import static org.apache.drill.exec.physical.base.AbstractBase.INIT_ALLOCATION;
 import static org.apache.drill.exec.physical.base.AbstractBase.MAX_ALLOCATION;
 
@@ -403,9 +401,9 @@ public class MiniPlanUnitTestBase extends PhysicalOpUnitTestBase {
       Iterator<RecordReader> readers = null;
 
       if (jsonBatches != null) {
-        readers = TestUtilities.getJsonReadersFromBatchString(jsonBatches, fragContext, columnsToRead);
+        readers = getJsonReadersFromBatchString(jsonBatches, fragContext, columnsToRead);
       } else {
-        readers = TestUtilities.getJsonReadersFromInputFiles(fs, inputPaths, fragContext, columnsToRead);
+        readers = getJsonReadersFromInputFiles(fs, inputPaths, fragContext, columnsToRead);
       }
 
       List<RecordReader> readerList = new ArrayList<>();
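
MiniPlanUnitTestBase picks these helpers up from PhysicalOpUnitTestBase, where this commit inlines
the JSON-reader factories that previously lived in TestUtilities (see the tail of this patch). A
hedged call-site sketch, assuming a subclass in the same package so the injectable fragContext
field is visible:

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.Iterator;
    import java.util.List;

    import org.apache.drill.common.expression.SchemaPath;
    import org.apache.drill.exec.store.RecordReader;

    public class ExampleJsonReaders extends PhysicalOpUnitTestBase {
      public Iterator<RecordReader> exampleReaders() {
        // Each string is one batch: either a JSON array of records or a single record.
        List<String> jsonBatches = Arrays.asList(
            "[{\"a\": 1}, {\"a\": 2}]",
            "{\"a\": 3}");
        List<SchemaPath> columns = Collections.singletonList(SchemaPath.getSimplePath("a"));
        return getJsonReadersFromBatchString(jsonBatches, fragContext, columns);
      }
    }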

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/PhysicalOpUnitTestBase.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/PhysicalOpUnitTestBase.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/PhysicalOpUnitTestBase.java
index 26345c9..fda4442 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/PhysicalOpUnitTestBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/PhysicalOpUnitTestBase.java
@@ -17,6 +17,8 @@
  */
 package org.apache.drill.exec.physical.unit;
 
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.base.Preconditions;
 import com.google.common.collect.Lists;
 import mockit.Delegate;
@@ -26,7 +28,9 @@ import org.antlr.runtime.ANTLRStringStream;
 import org.antlr.runtime.CommonTokenStream;
 import org.antlr.runtime.RecognitionException;
 import org.apache.calcite.rel.RelFieldCollation;
-import org.apache.drill.DrillTestWrapper;
+import org.apache.drill.exec.store.dfs.DrillFileSystem;
+import org.apache.drill.exec.store.easy.json.JSONRecordReader;
+import org.apache.drill.test.DrillTestWrapper;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.expression.FieldReference;
@@ -64,9 +68,9 @@ import org.apache.drill.exec.physical.impl.project.ProjectorTemplate;
 import org.apache.drill.exec.proto.ExecProtos;
 import org.apache.drill.exec.record.RecordBatch;
 import org.apache.drill.exec.record.VectorAccessible;
+import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.RecordReader;
 import org.apache.drill.exec.testing.ExecutionControls;
-import org.apache.drill.exec.util.TestUtilities;
 import org.junit.Before;
 
 import java.io.IOException;
@@ -82,10 +86,8 @@ import java.util.Map;
  * Look! Doesn't extend BaseTestQuery!!
  */
 public class PhysicalOpUnitTestBase extends ExecTest {
-//  public static long INIT_ALLOCATION = 1_000_000l;
-//  public static long MAX_ALLOCATION = 10_000_000L;
-
   @Injectable FragmentContext fragContext;
+  @Injectable DrillbitContext drillbitContext;
   @Injectable OperatorContext opContext;
   @Injectable OperatorStats opStats;
   @Injectable PhysicalOperator popConf;
@@ -295,6 +297,7 @@ public class PhysicalOpUnitTestBase extends ExecTest {
 //        // string options
 //        optManager.getOption(withAny(new TypeValidators.StringValidator("", "try"))); result = "try";
 //        optManager.getOption(withAny(new TypeValidators.PositiveLongValidator("", 1l, 1l))); result = 10;
+        drillbitContext.getCompiler(); result = new CodeCompiler(drillConf, optionManager);
         fragContext.getOptions(); result = optionManager;
         fragContext.getOptionSet(); result = optionManager;
         fragContext.getManagedBuffer(); result = bufManager.getManagedBuffer();
@@ -304,10 +307,11 @@ public class PhysicalOpUnitTestBase extends ExecTest {
         fragContext.getConfig(); result = drillConf;
         fragContext.getHandle(); result = ExecProtos.FragmentHandle.getDefaultInstance();
         fragContext.getFunctionRegistry(); result = funcReg;
+        fragContext.getDrillbitContext(); result = drillbitContext;
         try {
-          CodeGenerator<?> cg = CodeGenerator.get(templateClassDefinition, funcReg);
+          CodeGenerator<?> cg = CodeGenerator.get(templateClassDefinition);
           cg.plainJavaCapable(true);
-//          cg.saveCodeForDebugging(true);
+          // cg.saveCodeForDebugging(true);
           fragContext.getImplementationClass(withAny(cg));
           result = new Delegate<Object>()
           {
@@ -316,7 +320,7 @@ public class PhysicalOpUnitTestBase extends ExecTest {
               return compiler.createInstance(gen);
             }
           };
-          fragContext.getImplementationClass(withAny(CodeGenerator.get(templateClassDefinition, funcReg).getRoot()));
+          fragContext.getImplementationClass(withAny(CodeGenerator.get(templateClassDefinition).getRoot()));
           result = new Delegate<Object>()
           {
             @SuppressWarnings("unused")
@@ -352,7 +356,7 @@ public class PhysicalOpUnitTestBase extends ExecTest {
   }
 
   private Iterator<RecordReader> getRecordReadersForJsonBatches(List<String> jsonBatches, FragmentContext fragContext) {
-    return TestUtilities.getJsonReadersFromBatchString(jsonBatches, fragContext, Collections.singletonList(SchemaPath.getSimplePath("*")));
+    return getJsonReadersFromBatchString(jsonBatches, fragContext, Collections.singletonList(SchemaPath.getSimplePath("*")));
   }
 
   private List<RecordReader> getReaderListForJsonBatches(List<String> jsonBatches, FragmentContext fragContext) {
@@ -364,4 +368,43 @@ public class PhysicalOpUnitTestBase extends ExecTest {
     return readerList;
   }
 
+
+  /**
+   * Create a JSONRecordReader from each input string.
+   * @param jsonBatches : list of input strings; each element represents one batch and may take
+   *                    either the form "[{...}, {...}, ..., {...}]" or the form "{...}".
+   * @param fragContext : fragment context.
+   * @param columnsToRead : list of schema paths to read from the JSON reader.
+   * @return an iterator over one record reader per input batch.
+   */
+  public static Iterator<RecordReader> getJsonReadersFromBatchString(List<String> jsonBatches, FragmentContext fragContext, List<SchemaPath> columnsToRead) {
+    ObjectMapper mapper = new ObjectMapper();
+    List<RecordReader> readers = new ArrayList<>();
+    for (String batchJson : jsonBatches) {
+      JsonNode records;
+      try {
+        records = mapper.readTree(batchJson);
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+      readers.add(new JSONRecordReader(fragContext, records, null, columnsToRead));
+    }
+    return readers.iterator();
+  }
+
+  /**
+   * Create a JSONRecordReader from each file on a file system.
+   * @param fs : file system.
+   * @param inputPaths : list of .json file paths.
+   * @param fragContext : fragment context.
+   * @param columnsToRead : list of schema paths to read from the JSON reader.
+   * @return an iterator over one record reader per input file.
+   */
+  public static Iterator<RecordReader> getJsonReadersFromInputFiles(DrillFileSystem fs, List<String> inputPaths, FragmentContext fragContext, List<SchemaPath> columnsToRead) {
+    List<RecordReader> readers = new ArrayList<>();
+    for (String inputPath : inputPaths) {
+      readers.add(new JSONRecordReader(fragContext, inputPath, fs, columnsToRead));
+    }
+    return readers.iterator();
+  }
 }
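
A note on the relocated helpers (a minimal sketch, not part of the patch): a test
extending PhysicalOpUnitTestBase can now build JSON readers directly. The batch
strings and the column name "a" below are illustrative only.

    // Hypothetical usage inside a PhysicalOpUnitTestBase subclass.
    List<String> jsonBatches = Arrays.asList(
        "[{\"a\": 1}, {\"a\": 2}]",  // one batch holding two records
        "{\"a\": 3}");               // a single-record batch
    List<SchemaPath> columns =
        Collections.singletonList(SchemaPath.getSimplePath("a"));
    // fragContext is the @Injectable field provided by the base class
    Iterator<RecordReader> readers =
        getJsonReadersFromBatchString(jsonBatches, fragContext, columns);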

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestMiniPlan.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestMiniPlan.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestMiniPlan.java
index 43b1c9d..79b9dbc 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestMiniPlan.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestMiniPlan.java
@@ -21,7 +21,7 @@ package org.apache.drill.exec.physical.unit;
 import com.google.common.collect.Lists;
 import org.apache.drill.categories.PlannerTest;
 import org.apache.drill.common.types.TypeProtos;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.physical.config.Filter;
 import org.apache.drill.exec.physical.config.UnionAll;
 import org.apache.drill.exec.record.BatchSchema;
@@ -57,7 +57,7 @@ public class TestMiniPlan extends MiniPlanUnitTestBase {
 
   @Test
   public void testSimpleParquetScan() throws Exception {
-    String file = FileUtils.getResourceAsFile("/tpchmulti/region/01.parquet").toURI().toString();
+    String file = DrillFileUtils.getResourceAsFile("/tpchmulti/region/01.parquet").toURI().toString();
 
     RecordBatch scanBatch = new ParquetScanBuilder()
         .fileSystem(fs)

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java
index 1127314..c8dcd28 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/unit/TestNullInputMiniPlan.java
@@ -23,7 +23,7 @@ import org.apache.calcite.rel.RelFieldCollation;
 import org.apache.calcite.rel.core.JoinRelType;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.common.types.TypeProtos;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.physical.base.PhysicalOperator;
 import org.apache.drill.exec.physical.config.ExternalSort;
 import org.apache.drill.exec.physical.config.Filter;
@@ -272,7 +272,7 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
 
     RecordBatch left = createScanBatchFromJson(SINGLE_EMPTY_JSON);
 
-    String file = FileUtils.getResourceAsFile("/tpchmulti/region/01.parquet").toURI().toString();
+    String file = DrillFileUtils.getResourceAsFile("/tpchmulti/region/01.parquet").toURI().toString();
 
     RecordBatch scanBatch = new ParquetScanBuilder()
         .fileSystem(fs)
@@ -558,7 +558,7 @@ public class TestNullInputMiniPlan extends MiniPlanUnitTestBase{
     List<String> inputPaths = new ArrayList<>();
 
     for (String resource : resourcePaths) {
-      inputPaths.add(FileUtils.getResourceAsFile(resource).toURI().toString());
+      inputPaths.add(DrillFileUtils.getResourceAsFile(resource).toURI().toString());
     }
 
     RecordBatch scanBatch = new JsonScanBuilder()

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/planner/TestDirectoryExplorerUDFs.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/planner/TestDirectoryExplorerUDFs.java b/exec/java-exec/src/test/java/org/apache/drill/exec/planner/TestDirectoryExplorerUDFs.java
index 2407ee4..1a1127d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/planner/TestDirectoryExplorerUDFs.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/planner/TestDirectoryExplorerUDFs.java
@@ -17,30 +17,27 @@
 */
 package org.apache.drill.exec.planner;
 
+import java.io.File;
 import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.List;
 import java.util.Map;
 
 import com.google.common.collect.ImmutableMap;
+import org.apache.commons.io.FileUtils;
 import org.apache.drill.PlanTestBase;
 import org.apache.drill.categories.PlannerTest;
 import org.apache.drill.categories.SqlTest;
 import org.apache.drill.common.exceptions.UserRemoteException;
-import org.apache.drill.common.util.TestTools;
 import org.apache.drill.exec.fn.interp.TestConstantFolding;
 import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.util.JsonStringArrayList;
-import org.apache.drill.exec.util.TestUtilities;
+import org.apache.drill.exec.util.StoragePluginTestUtils;
 import org.apache.drill.exec.util.Text;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
-import org.apache.hadoop.fs.Path;
-import org.junit.Before;
 import org.junit.BeforeClass;
-import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TemporaryFolder;
 
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.Lists;
@@ -61,13 +58,11 @@ public class TestDirectoryExplorerUDFs extends PlanTestBase {
   }
 
   private static List<ConstantFoldingTestConfig> tests;
-  private String path;
-
-  @Rule
-  public TemporaryFolder folder = new TemporaryFolder();
+  private static Path path;
+  private static File test;
 
   @BeforeClass
-  public static void init() {
+  public static void init() throws Exception {
     // Need the suffixes to make the names unique in the directory.
     // The capitalized name is on the opposite function (imaxdir and mindir)
     // because they are looking on opposite ends of the list.
@@ -82,18 +77,16 @@ public class TestDirectoryExplorerUDFs extends PlanTestBase {
         .add(new ConstantFoldingTestConfig("MINDIR", "BIGFILE_2"))
         .add(new ConstantFoldingTestConfig("IMINDIR", "bigfile"))
         .build();
-  }
 
-  @Before
-  public void setup() throws Exception {
-    new TestConstantFolding.SmallFileCreator(folder).createFiles(1, 1000);
-    path = folder.getRoot().toPath().toString();
-  }
+    path = Paths.get("test");
+    test = dirTestWatcher.makeRootSubDir(path);
 
+    dirTestWatcher.copyResourceToRoot(Paths.get("parquet"));
+    new TestConstantFolding.SmallFileCreator(test).createFiles(1, 1000);
+  }
 
   @Test
   public void testConstExprFolding_maxDir0() throws Exception {
-
     test("use dfs.root");
 
     List<String> allFiles = ImmutableList.<String>builder()
@@ -103,7 +96,7 @@ public class TestDirectoryExplorerUDFs extends PlanTestBase {
         .add("BIGFILE_2")
         .build();
 
-    String query = "select * from dfs.`" + path + "/*/*.csv` where dir0 = %s('dfs.root','" + path + "')";
+    String query = "select * from dfs.`%s/*/*.csv` where dir0 = %s('dfs.root','%s')";
     for (ConstantFoldingTestConfig config : tests) {
       // make all of the other folders unexpected patterns, except for the one expected in this case
       List<String> excludedPatterns = Lists.newArrayList();
@@ -114,7 +107,7 @@ public class TestDirectoryExplorerUDFs extends PlanTestBase {
       String[] excludedArray = new String[excludedPatterns.size()];
 
       testPlanMatchingPatterns(
-          String.format(query, config.funcName),
+          String.format(query, path, config.funcName, path),
           new String[] {config.expectedFolderName},
           excludedPatterns.toArray(excludedArray));
     }
@@ -126,7 +119,7 @@ public class TestDirectoryExplorerUDFs extends PlanTestBase {
     list.add(new Text("3"));
 
     testBuilder()
-        .sqlQuery(String.format(query, tests.get(0).funcName))
+        .sqlQuery(query, path, tests.get(0).funcName, path)
         .unOrdered()
         .baselineColumns("columns", "dir0")
         .baselineValues(list, tests.get(0).expectedFolderName)
@@ -148,7 +141,7 @@ public class TestDirectoryExplorerUDFs extends PlanTestBase {
     for (Map.Entry<String, String> configEntry : configMap.entrySet()) {
       for (ConstantFoldingTestConfig functionConfig : tests) {
         try {
-          test(String.format(configEntry.getKey(), functionConfig.funcName));
+          test(configEntry.getKey(), functionConfig.funcName);
         } catch (UserRemoteException e) {
           assertThat(e.getMessage(), containsString(
               String.format("Directory explorers [MAXDIR, IMAXDIR, MINDIR, IMINDIR] functions are not supported in %s", configEntry.getValue())));
@@ -164,7 +157,7 @@ public class TestDirectoryExplorerUDFs extends PlanTestBase {
       String query = "select * from dfs.`" + path + "/*/*.csv` where dir0 = %s('dfs.root','" + path + "')";
       for (ConstantFoldingTestConfig config : tests) {
         try {
-          test(String.format(query, config.funcName));
+          test(query, config.funcName);
         } catch (UserRemoteException e) {
           assertThat(e.getMessage(), containsString("Directory explorers [MAXDIR, IMAXDIR, MINDIR, IMINDIR] functions can not be used " +
               "when planner.enable_constant_folding option is set to false"));
@@ -180,31 +173,34 @@ public class TestDirectoryExplorerUDFs extends PlanTestBase {
     //Initially update the location of the dfs_test.tmp workspace to the "path" temp directory, just for use in this unit test
     final StoragePluginRegistry pluginRegistry = getDrillbitContext().getStorage();
     try {
-      TestUtilities.updateDfsTestTmpSchemaLocation(pluginRegistry, path);
+      StoragePluginTestUtils.updateSchemaLocation(StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry, test, StoragePluginTestUtils.TMP_SCHEMA);
 
       //Results comparison of using Query Directory Functions (MAXDIR, IMAXDIR, MINDIR, IMINDIR) with one and two arguments
-      String queryWithTwoArgFunc = "select * from dfs.`" + path + "/*/*.csv` where dir0 = %s('dfs.root','" + path + "')";
-      String queryWithOneArgFunc = "select * from dfs.`" + path + "/*/*.csv` where dir0 = %s('dfs_test.tmp')";
-      for (ConstantFoldingTestConfig config : tests) {
-        testBuilder()
-            .sqlQuery(String.format(queryWithOneArgFunc, config.funcName))
-            .unOrdered()
-            .sqlBaselineQuery(String.format(queryWithTwoArgFunc, config.funcName))
-            .go();
-      }
+      String queryWithTwoArgFunc = "select * from dfs.`" + path + "/*/*.csv` where dir0 = %s('dfs.root','" + path + "')";
+      String queryWithOneArgFunc = "select * from dfs.`" + path + "/*/*.csv` where dir0 = %s('dfs.tmp')";
+      for (ConstantFoldingTestConfig config : tests) {
+        testBuilder()
+            .sqlQuery(queryWithOneArgFunc, config.funcName)
+            .unOrdered()
+            .sqlBaselineQuery(queryWithTwoArgFunc, config.funcName)
+            .go();
+      }
     } finally {
-        TestUtilities.updateDfsTestTmpSchemaLocation(pluginRegistry, getDfsTestTmpSchemaLocation());
+      StoragePluginTestUtils.updateSchemaLocation(StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry, dirTestWatcher.getDfsTestTmpDir(), StoragePluginTestUtils.TMP_SCHEMA);
     }
   }
 
   @Test // DRILL-4720
   public void testDirectoryUDFsWithAndWithoutMetadataCache() throws Exception {
-    FileSystem fs = getLocalFileSystem();
     // prepare test table with partitions
-    Path table = new Path(getTempDir("table_with_partitions"));
-    String tablePath = table.toUri().getPath();
-    Path dataFile = new Path(TestTools.getWorkingPath(),"src/test/resources/parquet/alltypes_required.parquet");
-    createPartitions(fs, table, dataFile, 2);
+    Path tableRelPath = Paths.get("table_with_partitions");
+    File tableDir = dirTestWatcher.makeRootSubDir(tableRelPath);
+    File dataFile = dirTestWatcher
+      .getRootDir()
+      .toPath()
+      .resolve(Paths.get("parquet", "alltypes_required.parquet"))
+      .toFile();
+    createPartitions(tableDir.toPath(), dataFile, 2);
 
     Map<String, String> configurations = ImmutableMap.<String, String>builder()
         .put("mindir", "part_1")
@@ -218,7 +214,7 @@ public class TestDirectoryExplorerUDFs extends PlanTestBase {
     // run tests without metadata cache
     for (Map.Entry<String, String> entry : configurations.entrySet()) {
       testBuilder()
-          .sqlQuery(query, tablePath, entry.getKey(), tablePath)
+          .sqlQuery(query, tableRelPath, entry.getKey(), tableRelPath)
           .unOrdered()
           .baselineColumns("dir0")
           .baselineValues(entry.getValue())
@@ -226,12 +222,12 @@ public class TestDirectoryExplorerUDFs extends PlanTestBase {
     }
 
     // generate metadata
-    test("refresh table metadata dfs.`%s`", tablePath);
+    test("refresh table metadata dfs.`%s`", tableRelPath);
 
     // run tests with metadata cache
     for (Map.Entry<String, String> entry : configurations.entrySet()) {
       testBuilder()
-          .sqlQuery(query, tablePath, entry.getKey(), tablePath)
+          .sqlQuery(query, tableRelPath, entry.getKey(), tableRelPath)
           .unOrdered()
           .baselineColumns("dir0")
           .baselineValues(entry.getValue())
@@ -239,12 +235,11 @@ public class TestDirectoryExplorerUDFs extends PlanTestBase {
     }
   }
 
-  private void createPartitions(FileSystem fs, Path table, Path dataFile, int number) throws IOException {
+  private void createPartitions(Path table, File dataFile, int number) throws IOException {
     for (int i = 1; i <= number; i++) {
-      Path partition = new Path(table, "part_" + i);
-      fs.mkdirs(partition);
-      FileUtil.copy(fs, dataFile, fs, partition, false, true, fs.getConf());
+      final Path partition = table.resolve("part_" + i);
+      final File destFile = partition.resolve(dataFile.getName()).toFile();
+      FileUtils.copyFile(dataFile, destFile);
     }
   }
-
 }
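
The workspace-relocation idiom above, as a sketch (only the StoragePluginTestUtils
and dirTestWatcher calls are taken from the patch; other names are illustrative):
point a dfs workspace at a test directory, query it, and restore the default
location in a finally block so later tests see a clean workspace.

    StoragePluginRegistry registry = getDrillbitContext().getStorage();
    File wsDir = dirTestWatcher.makeRootSubDir(Paths.get("my_ws")); // hypothetical name
    try {
      // repoint dfs.tmp at the test directory for the duration of the test
      StoragePluginTestUtils.updateSchemaLocation(
          StoragePluginTestUtils.DFS_PLUGIN_NAME, registry, wsDir,
          StoragePluginTestUtils.TMP_SCHEMA);
      // ... run queries against dfs.tmp here ...
    } finally {
      // restore the default temp workspace location
      StoragePluginTestUtils.updateSchemaLocation(
          StoragePluginTestUtils.DFS_PLUGIN_NAME, registry,
          dirTestWatcher.getDfsTestTmpDir(), StoragePluginTestUtils.TMP_SCHEMA);
    }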

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestCaseNullableTypes.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestCaseNullableTypes.java b/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestCaseNullableTypes.java
index 631cabd..fe0d433 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestCaseNullableTypes.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestCaseNullableTypes.java
@@ -16,7 +16,7 @@
 */
 package org.apache.drill.exec.planner.logical;
 
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlTest;
 import org.joda.time.DateTime;
 import org.joda.time.format.DateTimeFormat;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestConvertCountToDirectScan.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestConvertCountToDirectScan.java b/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestConvertCountToDirectScan.java
index 87c2c1c..cb7f22c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestConvertCountToDirectScan.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/planner/logical/TestConvertCountToDirectScan.java
@@ -20,17 +20,25 @@ package org.apache.drill.exec.planner.logical;
 import org.apache.drill.PlanTestBase;
 import org.apache.drill.categories.PlannerTest;
 import org.apache.drill.exec.ExecConstants;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import java.nio.file.Paths;
+
 @Category(PlannerTest.class)
 public class TestConvertCountToDirectScan extends PlanTestBase {
 
+  @BeforeClass
+  public static void setupTestFiles() {
+    dirTestWatcher.copyResourceToRoot(Paths.get("directcount.parquet"));
+  }
+
   @Test
   public void ensureCaseDoesntConvertToDirectScan() throws Exception {
     testPlanMatchingPatterns(
         "select count(case when n_name = 'ALGERIA' and n_regionkey = 2 then n_nationkey else null end) as cnt\n" +
-            "from dfs.`${WORKING_PATH}/src/test/resources/directcount.parquet`",
+            "from dfs.`directcount.parquet`",
         new String[] { "CASE" },
         new String[]{});
   }
@@ -117,7 +125,7 @@ public class TestConvertCountToDirectScan extends PlanTestBase {
 
   @Test
   public void ensureConvertForSeveralColumns() throws Exception {
-    test("use %s", TEMP_SCHEMA);
+    test("use dfs.tmp");
     final String tableName = "parquet_table_counts";
 
     try {
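
For orientation (a sketch under the patch's conventions, not from the commit): once a
resource is copied under the dfs root with copyResourceToRoot, queries can name it by a
root-relative path instead of ${WORKING_PATH}. The test method name below is made up.

    @BeforeClass
    public static void setupTestFiles() {
      // copies src/test/resources/directcount.parquet under the dfs root
      dirTestWatcher.copyResourceToRoot(Paths.get("directcount.parquet"));
    }

    @Test
    public void exampleRelativePathQuery() throws Exception {
      // the table path is now relative to the dfs root
      test("select count(*) from dfs.`directcount.parquet`");
    }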

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/planner/sql/TestDrillSQLWorker.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/planner/sql/TestDrillSQLWorker.java b/exec/java-exec/src/test/java/org/apache/drill/exec/planner/sql/TestDrillSQLWorker.java
index cddc3ae..57c5de5 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/planner/sql/TestDrillSQLWorker.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/planner/sql/TestDrillSQLWorker.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
 
 import org.apache.calcite.avatica.util.Quoting;
 import org.apache.calcite.sql.parser.SqlParserPos;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
 import org.apache.drill.categories.SqlTest;
 import org.apache.drill.exec.planner.physical.PlannerSettings;
 import org.junit.Test;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/pop/PopUnitTestBase.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/pop/PopUnitTestBase.java b/exec/java-exec/src/test/java/org/apache/drill/exec/pop/PopUnitTestBase.java
index b76aad3..bbe2b63 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/pop/PopUnitTestBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/pop/PopUnitTestBase.java
@@ -20,9 +20,9 @@ package org.apache.drill.exec.pop;
 import java.io.IOException;
 import java.util.Properties;
 
-import org.apache.drill.QueryTestUtil;
+import org.apache.drill.test.QueryTestUtil;
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.exception.FragmentSetupException;
@@ -70,7 +70,7 @@ public abstract class PopUnitTestBase  extends ExecTest{
 
   public static Fragment getRootFragment(PhysicalPlanReader reader, String file) throws FragmentSetupException,
       IOException, ForemanSetupException {
-    return getRootFragmentFromPlanString(reader, Files.toString(FileUtils.getResourceAsFile(file), Charsets.UTF_8));
+    return getRootFragmentFromPlanString(reader, Files.toString(DrillFileUtils.getResourceAsFile(file), Charsets.UTF_8));
   }
 
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/pop/TestInjectionValue.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/pop/TestInjectionValue.java b/exec/java-exec/src/test/java/org/apache/drill/exec/pop/TestInjectionValue.java
index 2637a84..be326a2 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/pop/TestInjectionValue.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/pop/TestInjectionValue.java
@@ -23,7 +23,7 @@ import java.util.List;
 
 import org.apache.drill.categories.PlannerTest;
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.ExecTest;
 import org.apache.drill.exec.physical.PhysicalPlan;
 import org.apache.drill.exec.physical.base.PhysicalOperator;
@@ -52,7 +52,7 @@ public class TestInjectionValue extends ExecTest {
   @Test
   public void testInjected() throws Exception{
     PhysicalPlanReader r = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(config);
-    PhysicalPlan p = r.readPhysicalPlan(Files.toString(FileUtils.getResourceAsFile("/physical_screen.json"), Charsets.UTF_8));
+    PhysicalPlan p = r.readPhysicalPlan(Files.toString(DrillFileUtils.getResourceAsFile("/physical_screen.json"), Charsets.UTF_8));
 
     List<PhysicalOperator> o = p.getSortedOperators(false);
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestRecordIterator.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestRecordIterator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestRecordIterator.java
index b3dd4c7..c07dbaa 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestRecordIterator.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/TestRecordIterator.java
@@ -26,7 +26,7 @@ import mockit.Injectable;
 
 import org.apache.drill.categories.VectorTest;
 import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.ops.FragmentContext;
 import org.apache.drill.exec.ops.OpProfileDef;
@@ -65,7 +65,7 @@ public class TestRecordIterator extends PopUnitTestBase {
 
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
 
-    final String planStr = Files.toString(FileUtils.getResourceAsFile("/record/test_recorditerator.json"), Charsets.UTF_8);
+    final String planStr = Files.toString(DrillFileUtils.getResourceAsFile("/record/test_recorditerator.json"), Charsets.UTF_8);
 
     final PhysicalPlan plan = reader.readPhysicalPlan(planStr);
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);
@@ -122,7 +122,7 @@ public class TestRecordIterator extends PopUnitTestBase {
 
     final PhysicalPlanReader reader = PhysicalPlanReaderTestFactory.defaultPhysicalPlanReader(c);
 
-    final String planStr = Files.toString(FileUtils.getResourceAsFile("/record/test_recorditerator.json"), Charsets.UTF_8);
+    final String planStr = Files.toString(DrillFileUtils.getResourceAsFile("/record/test_recorditerator.json"), Charsets.UTF_8);
 
     final PhysicalPlan plan = reader.readPhysicalPlan(planStr);
     final FunctionImplementationRegistry registry = new FunctionImplementationRegistry(c);

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestDateTypes.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestDateTypes.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestDateTypes.java
index 930bbbc..3fa9541 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestDateTypes.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestDateTypes.java
@@ -24,7 +24,7 @@ import java.util.Iterator;
 import java.util.List;
 
 import org.apache.drill.categories.VectorTest;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
 import org.apache.drill.exec.client.DrillClient;
 import org.apache.drill.exec.pop.PopUnitTestBase;
 import org.apache.drill.exec.record.RecordBatchLoader;
@@ -58,7 +58,7 @@ public class TestDateTypes extends PopUnitTestBase {
             bit.run();
             client.connect();
             List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-                    Files.toString(FileUtils.getResourceAsFile("/record/vector/test_date.json"), Charsets.UTF_8)
+                    Files.toString(DrillFileUtils.getResourceAsFile("/record/vector/test_date.json"), Charsets.UTF_8)
                             .replace("#{TEST_FILE}", "/test_simple_date.json"));
 
             RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
@@ -92,7 +92,7 @@ public class TestDateTypes extends PopUnitTestBase {
             bit.run();
             client.connect();
             List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-                    Files.toString(FileUtils.getResourceAsFile("/record/vector/test_sort_date.json"), Charsets.UTF_8)
+                    Files.toString(DrillFileUtils.getResourceAsFile("/record/vector/test_sort_date.json"), Charsets.UTF_8)
                             .replace("#{TEST_FILE}", "/test_simple_date.json"));
 
             RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
@@ -126,7 +126,7 @@ public class TestDateTypes extends PopUnitTestBase {
             bit.run();
             client.connect();
             List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-                    Files.toString(FileUtils.getResourceAsFile("/record/vector/test_timestamp.json"), Charsets.UTF_8)
+                    Files.toString(DrillFileUtils.getResourceAsFile("/record/vector/test_timestamp.json"), Charsets.UTF_8)
                             .replace("#{TEST_FILE}", "/test_simple_date.json"));
 
             RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
@@ -160,7 +160,7 @@ public class TestDateTypes extends PopUnitTestBase {
             bit.run();
             client.connect();
             List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-                    Files.toString(FileUtils.getResourceAsFile("/record/vector/test_interval.json"), Charsets.UTF_8)
+                    Files.toString(DrillFileUtils.getResourceAsFile("/record/vector/test_interval.json"), Charsets.UTF_8)
                             .replace("#{TEST_FILE}", "/test_simple_interval.json"));
 
             RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
@@ -218,7 +218,7 @@ public class TestDateTypes extends PopUnitTestBase {
             bit.run();
             client.connect();
             List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-                    Files.toString(FileUtils.getResourceAsFile("/record/vector/test_all_date_literals.json"), Charsets.UTF_8)
+                    Files.toString(DrillFileUtils.getResourceAsFile("/record/vector/test_all_date_literals.json"), Charsets.UTF_8)
                             .replace("#{TEST_FILE}", "/test_simple_date.json"));
 
             RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());
@@ -257,7 +257,7 @@ public class TestDateTypes extends PopUnitTestBase {
             bit.run();
             client.connect();
             List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-                    Files.toString(FileUtils.getResourceAsFile("/record/vector/test_date_add.json"), Charsets.UTF_8)
+                    Files.toString(DrillFileUtils.getResourceAsFile("/record/vector/test_date_add.json"), Charsets.UTF_8)
                             .replace("#{TEST_FILE}", "/test_simple_date.json"));
 
             RecordBatchLoader batchLoader = new RecordBatchLoader(bit.getContext().getAllocator());

