drill-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From prog...@apache.org
Subject [19/22] drill git commit: DRILL-5783, DRILL-5841, DRILL-5894: Rationalize test temp directories
Date Wed, 15 Nov 2017 01:47:05 GMT
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestBuilder.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/TestBuilder.java
deleted file mode 100644
index acde8ed..0000000
--- a/exec/java-exec/src/test/java/org/apache/drill/TestBuilder.java
+++ /dev/null
@@ -1,689 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.IOException;
-import java.sql.Timestamp;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.antlr.runtime.ANTLRStringStream;
-import org.antlr.runtime.CommonTokenStream;
-import org.antlr.runtime.RecognitionException;
-import org.apache.commons.lang3.tuple.Pair;
-import org.apache.drill.DrillTestWrapper.TestServices;
-import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.expression.parser.ExprLexer;
-import org.apache.drill.common.expression.parser.ExprParser;
-import org.apache.drill.common.types.TypeProtos;
-import org.apache.drill.common.types.Types;
-import org.apache.drill.exec.proto.UserBitShared;
-import org.apache.drill.exec.proto.UserBitShared.QueryType;
-import org.apache.drill.exec.proto.UserProtos.PreparedStatementHandle;
-import org.apache.drill.exec.record.BatchSchema;
-import org.apache.drill.exec.record.MaterializedField;
-import org.apache.drill.exec.util.JsonStringArrayList;
-import org.apache.drill.exec.util.JsonStringHashMap;
-import org.apache.drill.exec.util.Text;
-
-import com.google.common.base.Joiner;
-import com.google.common.base.Preconditions;
-import org.joda.time.DateTimeZone;
-
-public class TestBuilder {
-
-  /**
-   * Test query to run. Type of object depends on the {@link #queryType}
-   */
-  private Object query;
-  // the type of query for the test
-  private UserBitShared.QueryType queryType;
-  // should the validation enforce ordering
-  private Boolean ordered;
-  private boolean approximateEquality;
-  private TestServices services;
-  // Used to pass the type information associated with particular column names rather than relying on the
-  // ordering of the columns in the CSV file, or the default type inferences when reading JSON, this is used for the
-  // case where results of the test query are adding type casts to the baseline queries, this saves a little bit of
-  // setup in cases where strict type enforcement is not necessary for a given test
-  protected Map<SchemaPath, TypeProtos.MajorType> baselineTypeMap;
-  // queries to run before the baseline or test queries, can be used to set options
-  private String baselineOptionSettingQueries;
-  private String testOptionSettingQueries;
-  // two different methods are available for comparing ordered results, the default reads all of the records
-  // into giant lists of objects, like one giant on-heap batch of 'vectors'
-  // this flag enables the other approach which iterates through a hyper batch for the test query results and baseline
-  // while this does work faster and use less memory, it can be harder to debug as all of the elements are not in a
-  // single list
-  private boolean highPerformanceComparison;
-  // column names for use with the baseline values
-  protected String[] baselineColumns;
-  // In cases where we need to verify larger datasets without the risk of running the baseline data through
-  // the drill engine, results can be provided in a list of maps. While this model does make a lot of sense, there is a
-  // lot of work to make the type handling/casting work correctly, and making robust complex type handling work completely outside
-  // of the drill engine for generating baselines would likely be more work than it would be worth. For now we will be
-  // going with an approach of using this facility to validate the parts of the drill engine that could break in ways
-  // that would affect the reading of baseline files (i.e. we need robust test for storage engines, project and casting that
-  // use this interface) and then rely on the engine for the rest of the tests that will use the baseline queries.
-  private List<Map<String, Object>> baselineRecords;
-
-  private int expectedNumBatches = DrillTestWrapper.EXPECTED_BATCH_COUNT_NOT_SET;
-
-  public TestBuilder(TestServices services) {
-    this.services = services;
-    reset();
-  }
-
-  public TestBuilder(TestServices services, Object query, UserBitShared.QueryType queryType, Boolean ordered,
-                     boolean approximateEquality, Map<SchemaPath, TypeProtos.MajorType> baselineTypeMap,
-                     String baselineOptionSettingQueries, String testOptionSettingQueries, boolean highPerformanceComparison,
-                     int expectedNumBatches) {
-    this(services);
-    if (ordered == null) {
-      throw new RuntimeException("Ordering not set, when using a baseline file or query you must explicitly call the ordered() or unOrdered() method on the " + this.getClass().getSimpleName());
-    }
-    this.query = query;
-    this.queryType = queryType;
-    this.ordered = ordered;
-    this.approximateEquality = approximateEquality;
-    this.baselineTypeMap = baselineTypeMap;
-    this.baselineOptionSettingQueries = baselineOptionSettingQueries;
-    this.testOptionSettingQueries = testOptionSettingQueries;
-    this.highPerformanceComparison = highPerformanceComparison;
-    this.expectedNumBatches = expectedNumBatches;
-  }
-
-  protected TestBuilder reset() {
-    query = "";
-    ordered = null;
-    approximateEquality = false;
-    highPerformanceComparison = false;
-    testOptionSettingQueries = "";
-    baselineOptionSettingQueries = "";
-    baselineRecords = null;
-    return this;
-  }
-
-  public DrillTestWrapper build() throws Exception {
-    if ( ! ordered && highPerformanceComparison ) {
-      throw new Exception("High performance comparison only available for ordered checks, to enforce this restriction, ordered() must be called first.");
-    }
-    return new DrillTestWrapper(this, services, query, queryType, baselineOptionSettingQueries, testOptionSettingQueries,
-        getValidationQueryType(), ordered, highPerformanceComparison, baselineRecords, expectedNumBatches);
-  }
-
-  public List<Pair<SchemaPath, TypeProtos.MajorType>> getExpectedSchema() {
-    return null;
-  }
-
-  public void go() throws Exception {
-    build().run();
-  }
-
-  public TestBuilder sqlQuery(String query) {
-    this.query = QueryTestUtil.normalizeQuery(query);
-    this.queryType = UserBitShared.QueryType.SQL;
-    return this;
-  }
-
-  public TestBuilder sqlQuery(String query, Object... replacements) {
-    return sqlQuery(String.format(query, replacements));
-  }
-
-  public TestBuilder preparedStatement(PreparedStatementHandle preparedStatementHandle) {
-    queryType = QueryType.PREPARED_STATEMENT;
-    query = preparedStatementHandle;
-    return this;
-  }
-
-  public TestBuilder sqlQueryFromFile(String queryFile) throws IOException {
-    String query = BaseTestQuery.getFile(queryFile);
-    this.query = query;
-    queryType = UserBitShared.QueryType.SQL;
-    return this;
-  }
-
-  public TestBuilder physicalPlanFromFile(String queryFile) throws IOException {
-    String query = BaseTestQuery.getFile(queryFile);
-    this.query = query;
-    queryType = UserBitShared.QueryType.PHYSICAL;
-    return this;
-  }
-
-  public TestBuilder ordered() {
-    ordered = true;
-    return this;
-  }
-
-  public TestBuilder unOrdered() {
-    ordered = false;
-    return this;
-  }
-
-  // this can only be used with ordered verifications, it does run faster and use less memory but may be
-  // a little harder to debug as it iterates over a hyper batch rather than reading all of the values into
-  // large on-heap lists
-  public TestBuilder highPerformanceComparison() throws Exception {
-    highPerformanceComparison = true;
-    return this;
-  }
-
-  // list of queries to run before the baseline query, can be used to set several options
-  // list takes the form of a semi-colon separated list
-  public TestBuilder optionSettingQueriesForBaseline(String queries) {
-    baselineOptionSettingQueries = queries;
-    return this;
-  }
-
-  public TestBuilder optionSettingQueriesForBaseline(String queries, Object... args) {
-    baselineOptionSettingQueries = String.format(queries, args);
-    return this;
-  }
-
-  /**
-   *  list of queries to run before the test query, can be used to set several options
-   *  list takes the form of a semi-colon separated list.
-   * @param queries queries that set session and system options
-   * @return this test builder
-   */
-
-  public TestBuilder optionSettingQueriesForTestQuery(String queries) {
-    testOptionSettingQueries = queries;
-    return this;
-  }
-
-  public TestBuilder optionSettingQueriesForTestQuery(String query, Object... args) throws Exception {
-    testOptionSettingQueries = String.format(query, args);
-    return this;
-  }
-
-  public TestBuilder approximateEquality() {
-    approximateEquality = true;
-    return this;
-  }
-
-  // modified code from SchemaPath.De class. This should be used sparingly and only in tests if absolutely needed.
-  public static SchemaPath parsePath(String path) {
-    try {
-      ExprLexer lexer = new ExprLexer(new ANTLRStringStream(path));
-      CommonTokenStream tokens = new CommonTokenStream(lexer);
-      ExprParser parser = new ExprParser(tokens);
-
-      ExprParser.parse_return ret = parser.parse();
-
-      if (ret.e instanceof SchemaPath) {
-        return (SchemaPath) ret.e;
-      } else {
-        throw new IllegalStateException("Schema path is not a valid format.");
-      }
-    } catch (RecognitionException e) {
-      throw new RuntimeException(e);
-    }
-  }
-
-  Object getValidationQuery() throws Exception {
-    throw new RuntimeException("Must provide some kind of baseline, either a baseline file or another query");
-  }
-
-  protected UserBitShared.QueryType getValidationQueryType() throws Exception {
-    if (singleExplicitBaselineRecord()) {
-      return null;
-    }
-    throw new RuntimeException("Must provide some kind of baseline, either a baseline file or another query");
-  }
-
-  public JSONTestBuilder jsonBaselineFile(String filePath) {
-    return new JSONTestBuilder(filePath, services, query, queryType, ordered, approximateEquality,
-        baselineTypeMap, baselineOptionSettingQueries, testOptionSettingQueries, highPerformanceComparison,
-        expectedNumBatches);
-  }
-
-  public CSVTestBuilder csvBaselineFile(String filePath) {
-    return new CSVTestBuilder(filePath, services, query, queryType, ordered, approximateEquality,
-        baselineTypeMap, baselineOptionSettingQueries, testOptionSettingQueries, highPerformanceComparison,
-        expectedNumBatches);
-  }
-
-  public SchemaTestBuilder schemaBaseLine(BatchSchema batchSchema) {
-    List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema = new ArrayList<>();
-    for (final MaterializedField field : batchSchema) {
-      expectedSchema.add(Pair.of(SchemaPath.getSimplePath(field.getName()), field.getType()));
-    }
-    return schemaBaseLine(expectedSchema);
-  }
-
-  public SchemaTestBuilder schemaBaseLine(List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema) {
-    assert expectedSchema != null : "The expected schema can be provided once";
-    assert baselineColumns == null : "The column information should be captured in expected schema, not baselineColumns";
-
-    return new SchemaTestBuilder(
-        services,
-        query,
-        queryType,
-        baselineOptionSettingQueries,
-        testOptionSettingQueries,
-        expectedSchema);
-  }
-
-  public TestBuilder baselineTypes(Map<SchemaPath, TypeProtos.MajorType> baselineTypeMap) {
-    this.baselineTypeMap = baselineTypeMap;
-    return this;
-  }
-
-  boolean typeInfoSet() {
-    if (baselineTypeMap != null) {
-      return true;
-    } else {
-      return false;
-    }
-  }
-
-  /**
-   * Indicate that the tests query should be checked for an empty result set.
-   * @return the test builder
-   */
-  public TestBuilder expectsEmptyResultSet() {
-    unOrdered();
-    baselineRecords = new ArrayList<>();
-    return this;
-  }
-
-  /**
-   * Sets the expected batch count for this query. The test will fail if the query returns a different number
-   * of batches
-   *
-   * @param expectedNumBatches expected batch count
-   * @return this test builder
-   */
-  public TestBuilder expectsNumBatches(int expectedNumBatches) {
-    this.expectedNumBatches = expectedNumBatches;
-    return this;
-  }
-
-  /**
-   * This method is used to pass in a simple list of values for a single record verification without
-   * the need to create a CSV or JSON file to store the baseline.
-   *
-   * This can be called repeatedly to pass a list of records to verify. It works for both ordered and unordered
-   * checks.
-   *
-   * @param baselineValues - the baseline values to validate
-   * @return the test builder
-   */
-  public TestBuilder baselineValues(Object ... baselineValues) {
-    assert getExpectedSchema() == null : "The expected schema is not needed when baselineValues are provided ";
-    if (ordered == null) {
-      throw new RuntimeException("Ordering not set, before specifying baseline data you must explicitly call the ordered() or unOrdered() method on the " + this.getClass().getSimpleName());
-    }
-    if (baselineRecords == null) {
-      baselineRecords = new ArrayList<>();
-    }
-    Map<String, Object> ret = new HashMap<>();
-    int i = 0;
-    assertEquals("Must supply the same number of baseline values as columns.", baselineValues.length, baselineColumns.length);
-    for (String s : baselineColumns) {
-      ret.put(s, baselineValues[i]);
-      i++;
-    }
-    this.baselineRecords.add(ret);
-    return this;
-  }
-
-  /**
-   * This can be used in cases where we want to avoid issues with the assumptions made by the test framework.
-   * Most of the methods for verification in the framework run drill queries to generate the read baseline files or
-   * execute alternative baseline queries. This model relies on basic functionality of reading files with storage
-   * plugins and applying casts/projects to be stable.
-   *
-   * This method can be used to verify the engine for these cases and any other future execution paths that would
-   * be used by both the test query and baseline. Without tests like this it is possible that some tests
-   * could falsely report as passing, as both the test query and baseline query could run into the same problem
-   * with an assumed stable code path and produce the same erroneous result.
-   *
-   * @param materializedRecords - a list of maps representing materialized results
-   * @return the test builder
-   */
-  public TestBuilder baselineRecords(List<Map<String, Object>> materializedRecords) {
-    this.baselineRecords = materializedRecords;
-    return this;
-  }
-
-  /**
-   * This setting has a slightly different impact on the test depending on how some of the other
-   * configuration options are set.
-   *
-   * If a JSON baseline file is given, this list will act as a project list to verify the
-   * test query against a subset of the columns in the file.
-   *
-   * For a CSV baseline file, these will act as aliases for columns [0 .. n] in the repeated
-   * varchar column that is read out of CSV.
-   *
-   * For a baseline sql query, this currently has no effect.
-   *
-   * For explicit baseline values given in java code with the baselineValues() method, these will
-   * be used to create a map for the one record verification.
-   */
-  public TestBuilder baselineColumns(String... columns) {
-    assert getExpectedSchema() == null : "The expected schema is not needed when baselineColumns are provided ";
-    for (int i = 0; i < columns.length; i++) {
-      columns[i] = parsePath(columns[i]).toExpr();
-    }
-    this.baselineColumns = columns;
-    return this;
-  }
-
-  private boolean singleExplicitBaselineRecord() {
-    return baselineRecords != null;
-  }
-
-  /**
-   * Provide a SQL query to validate against.
-   * @param baselineQuery
-   * @return the test builder
-   */
-  public BaselineQueryTestBuilder sqlBaselineQuery(Object baselineQuery) {
-    return new BaselineQueryTestBuilder(baselineQuery, UserBitShared.QueryType.SQL, services, query, queryType, ordered, approximateEquality,
-        baselineTypeMap, baselineOptionSettingQueries, testOptionSettingQueries, highPerformanceComparison, expectedNumBatches);
-  }
-
-  public BaselineQueryTestBuilder sqlBaselineQuery(String query, String ...replacements) {
-    return sqlBaselineQuery(String.format(query, (Object[]) replacements));
-  }
-
-  // provide a path to a file containing a SQL query to use as a baseline
-  public BaselineQueryTestBuilder sqlBaselineQueryFromFile(String baselineQueryFilename) throws IOException {
-    String baselineQuery = BaseTestQuery.getFile(baselineQueryFilename);
-    return new BaselineQueryTestBuilder(baselineQuery, UserBitShared.QueryType.SQL, services, query, queryType, ordered, approximateEquality,
-        baselineTypeMap, baselineOptionSettingQueries, testOptionSettingQueries, highPerformanceComparison, expectedNumBatches);
-  }
-
-  // as physical plans are verbose, this is the only option provided for specifying them, we should enforce
-  // that physical plans, or any large JSON strings do not live in the Java source as literals
-  public BaselineQueryTestBuilder physicalPlanBaselineQueryFromFile(String baselinePhysicalPlanPath) throws IOException {
-    String baselineQuery = BaseTestQuery.getFile(baselinePhysicalPlanPath);
-    return new BaselineQueryTestBuilder(baselineQuery, UserBitShared.QueryType.PHYSICAL, services, query, queryType, ordered, approximateEquality,
-        baselineTypeMap, baselineOptionSettingQueries, testOptionSettingQueries, highPerformanceComparison, expectedNumBatches);
-  }
-
-  private String getDecimalPrecisionScaleInfo(TypeProtos.MajorType type) {
-    String precision = "";
-    switch(type.getMinorType()) {
-      case DECIMAL18:
-      case DECIMAL28SPARSE:
-      case DECIMAL38SPARSE:
-      case DECIMAL38DENSE:
-      case DECIMAL28DENSE:
-      case DECIMAL9:
-        precision = String.format("(%d,%d)", type.getPrecision(), type.getScale());
-        break;
-      default:
-        ; // do nothing empty string set above
-    }
-    return precision;
-  }
-
-  public class CSVTestBuilder extends TestBuilder {
-
-    // path to the baseline file that will be inserted into the validation query
-    private String baselineFilePath;
-    // used to cast the baseline file columns; if not set, the types
-    // that come out of the test query drive interpretation of baseline
-    private TypeProtos.MajorType[] baselineTypes;
-
-    CSVTestBuilder(String baselineFile, TestServices services, Object query, UserBitShared.QueryType queryType, Boolean ordered,
-                   boolean approximateEquality, Map<SchemaPath, TypeProtos.MajorType> baselineTypeMap,
-                   String baselineOptionSettingQueries, String testOptionSettingQueries, boolean highPerformanceComparison,
-                   int expectedNumBatches) {
-      super(services, query, queryType, ordered, approximateEquality, baselineTypeMap, baselineOptionSettingQueries, testOptionSettingQueries,
-          highPerformanceComparison, expectedNumBatches);
-      this.baselineFilePath = baselineFile;
-    }
-
-    public CSVTestBuilder baselineTypes(TypeProtos.MajorType... baselineTypes) {
-      this.baselineTypes = baselineTypes;
-      this.baselineTypeMap = null;
-      return this;
-    }
-
-    // convenience method to convert minor types to major types if no decimals with precisions are needed
-    public CSVTestBuilder baselineTypes(TypeProtos.MinorType ... baselineTypes) {
-      TypeProtos.MajorType[] majorTypes = new TypeProtos.MajorType[baselineTypes.length];
-      int i = 0;
-      for(TypeProtos.MinorType minorType : baselineTypes) {
-        majorTypes[i] = Types.required(minorType);
-        i++;
-      }
-      this.baselineTypes = majorTypes;
-      this.baselineTypeMap = null;
-      return this;
-    }
-
-    @Override
-    protected TestBuilder reset() {
-      super.reset();
-      baselineTypeMap = null;
-      baselineTypes = null;
-      baselineFilePath = null;
-      return this;
-    }
-
-    @Override
-    boolean typeInfoSet() {
-      if (super.typeInfoSet() || baselineTypes != null) {
-        return true;
-      } else {
-        return false;
-      }
-    }
-
-    @Override
-    String getValidationQuery() throws Exception {
-      if (baselineColumns.length == 0) {
-        throw new Exception("Baseline CSV files require passing column names, please call the baselineColumns() method on the test builder.");
-      }
-
-      if (baselineTypes != null) {
-        assertEquals("Must pass the same number of types as column names if types are provided.", baselineTypes.length, baselineColumns.length);
-      }
-
-      String[] aliasedExpectedColumns = new String[baselineColumns.length];
-      for (int i = 0; i < baselineColumns.length; i++) {
-        aliasedExpectedColumns[i] = "columns[" + i + "] ";
-        TypeProtos.MajorType majorType;
-        if (baselineTypes != null) {
-          majorType = baselineTypes[i];
-        } else if (baselineTypeMap != null) {
-          majorType = baselineTypeMap.get(parsePath(baselineColumns[i]));
-        } else {
-          throw new Exception("Type information not set for interpreting csv baseline file.");
-        }
-        String precision = getDecimalPrecisionScaleInfo(majorType);
-        // TODO - determine if there is a better behavior here, if we do not specify a length the default behavior is
-        // to cast to varchar with length 1
-        // set default cast size for varchar, the cast function will take the lesser of this passed value and the
-        // length of the incoming data when choosing the length for the outgoing data
-        if (majorType.getMinorType() == TypeProtos.MinorType.VARCHAR ||
-            majorType.getMinorType() == TypeProtos.MinorType.VARBINARY) {
-          precision = "(65000)";
-        }
-        aliasedExpectedColumns[i] = "cast(" + aliasedExpectedColumns[i] + " as " +
-            Types.getNameOfMinorType(majorType.getMinorType()) + precision +  " ) " + baselineColumns[i];
-      }
-      String query = "select " + Joiner.on(", ").join(aliasedExpectedColumns) + " from cp.`" + baselineFilePath + "`";
-      return query;
-    }
-
-    @Override
-    protected UserBitShared.QueryType getValidationQueryType() throws Exception {
-      return UserBitShared.QueryType.SQL;
-    }
-  }
-
-  public class SchemaTestBuilder extends TestBuilder {
-    private List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema;
-    SchemaTestBuilder(TestServices services, Object query, UserBitShared.QueryType queryType,
-        String baselineOptionSettingQueries, String testOptionSettingQueries, List<Pair<SchemaPath, TypeProtos.MajorType>> expectedSchema) {
-      super(services, query, queryType, false, false, null, baselineOptionSettingQueries, testOptionSettingQueries, false, -1);
-      expectsEmptyResultSet();
-      this.expectedSchema = expectedSchema;
-    }
-
-    @Override
-    public TestBuilder baselineColumns(String... columns) {
-      assert false : "The column information should be captured in expected scheme, not baselineColumns";
-      return this;
-    }
-
-    @Override
-    public TestBuilder baselineRecords(List<Map<String, Object>> materializedRecords) {
-      assert false : "Since only schema will be compared in this test, no record is expected";
-      return this;
-    }
-
-    @Override
-    public TestBuilder baselineValues(Object... objects) {
-      assert false : "Since only schema will be compared in this test, no record is expected";
-      return this;
-    }
-
-    @Override
-    protected UserBitShared.QueryType getValidationQueryType() throws Exception {
-      return null;
-    }
-
-    @Override
-    public List<Pair<SchemaPath, TypeProtos.MajorType>> getExpectedSchema() {
-      return expectedSchema;
-    }
-  }
-
-  public class JSONTestBuilder extends TestBuilder {
-
-    // path to the baseline file that will be inserted into the validation query
-    private String baselineFilePath;
-
-    JSONTestBuilder(String baselineFile, TestServices services, Object query, UserBitShared.QueryType queryType, Boolean ordered,
-                    boolean approximateEquality, Map<SchemaPath, TypeProtos.MajorType> baselineTypeMap,
-                    String baselineOptionSettingQueries, String testOptionSettingQueries, boolean highPerformanceComparison,
-                    int expectedNumBatches) {
-      super(services, query, queryType, ordered, approximateEquality, baselineTypeMap, baselineOptionSettingQueries, testOptionSettingQueries,
-          highPerformanceComparison, expectedNumBatches);
-      this.baselineFilePath = baselineFile;
-      this.baselineColumns = new String[] {"*"};
-    }
-
-    @Override
-    String getValidationQuery() {
-      return "select " + Joiner.on(", ").join(baselineColumns) + " from cp.`" + baselineFilePath + "`";
-    }
-
-    @Override
-    protected UserBitShared.QueryType getValidationQueryType() throws Exception {
-      return UserBitShared.QueryType.SQL;
-    }
-
-  }
-
-  public class BaselineQueryTestBuilder extends TestBuilder {
-
-    /**
-     * Baseline query. Type of object depends on {@link #baselineQueryType}
-     */
-    private Object baselineQuery;
-    private UserBitShared.QueryType baselineQueryType;
-
-    BaselineQueryTestBuilder(Object baselineQuery, UserBitShared.QueryType baselineQueryType, TestServices services,
-                             Object query, UserBitShared.QueryType queryType, Boolean ordered,
-                             boolean approximateEquality, Map<SchemaPath, TypeProtos.MajorType> baselineTypeMap,
-                             String baselineOptionSettingQueries, String testOptionSettingQueries, boolean highPerformanceComparison,
-                             int expectedNumBatches) {
-      super(services, query, queryType, ordered, approximateEquality, baselineTypeMap, baselineOptionSettingQueries, testOptionSettingQueries,
-          highPerformanceComparison, expectedNumBatches);
-      this.baselineQuery = baselineQuery;
-      this.baselineQueryType = baselineQueryType;
-    }
-
-    @Override
-    Object getValidationQuery() {
-      return baselineQuery;
-    }
-
-    @Override
-    protected UserBitShared.QueryType getValidationQueryType() throws Exception {
-      return baselineQueryType;
-    }
-
-    // This currently assumes that all explicit baseline queries will have fully qualified type information
-    // if this changes, the baseline query can be run in a sub query with the implicit or explicit type passing
-    // added on top of it, as is currently done when reading a baseline file
-    @Override
-    boolean typeInfoSet() {
-      return true;
-    }
-
-  }
-
-  /**
-   * Convenience method to create a {@link JsonStringArrayList list} from the given values.
-   */
-  public static JsonStringArrayList<Object> listOf(Object... values) {
-    final JsonStringArrayList<Object> list = new JsonStringArrayList<>();
-    for (Object value:values) {
-      if (value instanceof CharSequence) {
-        list.add(new Text(value.toString()));
-      } else {
-        list.add(value);
-      }
-    }
-    return list;
-  }
-
-  /**
-   * Convenience method to create a {@link JsonStringHashMap<String, Object> map} instance with the given key value sequence.
-   *
-   * Key value sequence consists of key - value pairs such that a key precedes its value. For instance:
-   *
-   * mapOf("name", "Adam", "age", 41) corresponds to {"name": "Adam", "age": 41} in JSON.
-   */
-  public static JsonStringHashMap<String, Object> mapOf(Object... keyValueSequence) {
-    Preconditions.checkArgument(keyValueSequence.length%2==0, "Length of key value sequence must be even");
-    final JsonStringHashMap<String, Object> map = new JsonStringHashMap<>();
-    for (int i=0; i<keyValueSequence.length; i+=2) {
-      Object value = keyValueSequence[i+1];
-      if (value instanceof CharSequence) {
-        value = new Text(value.toString());
-      }
-      map.put(String.class.cast(keyValueSequence[i]), value);
-    }
-    return map;
-  }
-
-  /**
-   * Helper method for the timestamp values that depend on the local timezone
-   * @param value expected timestamp value in UTC
-   * @return timestamp value for the local timezone
-   */
-  public static Timestamp convertToLocalTimestamp(String value) {
-    long UTCTimestamp = Timestamp.valueOf(value).getTime();
-    return new Timestamp(DateTimeZone.getDefault().convertUTCToLocal(UTCTimestamp));
-  }
-}

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestCTASJson.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestCTASJson.java b/exec/java-exec/src/test/java/org/apache/drill/TestCTASJson.java
index c76892d..d074397 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestCTASJson.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestCTASJson.java
@@ -17,19 +17,11 @@
  */
 package org.apache.drill;
 
-
-import org.apache.drill.common.util.TestTools;
-import org.apache.drill.exec.ExecConstants;
 import org.junit.Test;
 
-import static org.junit.Assert.assertEquals;
-
 public class TestCTASJson extends PlanTestBase {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestCTASJson.class);
 
-  static final String WORKING_PATH = TestTools.getWorkingPath();
-  static final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources";
-
   @Test
   /**
    * Test a source json file that contains records that are maps with fields of all types.
@@ -37,7 +29,7 @@ public class TestCTASJson extends PlanTestBase {
    */
   public void testctas_alltypes_map() throws Exception {
     String testName = "ctas_alltypes_map";
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
     test("create table " + testName + "_json as select * from cp.`json/" + testName + ".json`");
 
     final String query = "select * from `" + testName + "_json` t1 ";
@@ -65,7 +57,7 @@ public class TestCTASJson extends PlanTestBase {
    */
   public void testctas_alltypes_map_noskip() throws Exception {
     String testName = "ctas_alltypes_map";
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
     test("create table " + testName + "_json as select * from cp.`json/" + testName + ".json`");
 
     final String query = "select * from `" + testName + "_json` t1 ";
@@ -94,7 +86,7 @@ public class TestCTASJson extends PlanTestBase {
    */
   public void testctas_alltypes_repeatedmap() throws Exception {
     String testName = "ctas_alltypes_repeated_map";
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
     test("create table " + testName + "_json as select * from cp.`json/" + testName + ".json`");
 
     final String query = "select * from `" + testName + "_json` t1 ";
@@ -124,7 +116,7 @@ public class TestCTASJson extends PlanTestBase {
    */
   public void testctas_alltypes_repeated_map_noskip() throws Exception {
     String testName = "ctas_alltypes_repeated_map";
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
     test("create table " + testName + "_json as select * from cp.`json/" + testName + ".json`");
 
     final String query = "select * from `" + testName + "_json` t1 ";

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestCTASPartitionFilter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestCTASPartitionFilter.java b/exec/java-exec/src/test/java/org/apache/drill/TestCTASPartitionFilter.java
index b0238e2..fee29e0 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestCTASPartitionFilter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestCTASPartitionFilter.java
@@ -17,20 +17,16 @@
  */
 package org.apache.drill;
 
-
-import org.apache.drill.common.util.FileUtils;
-import org.apache.drill.common.util.TestTools;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+import java.nio.file.Paths;
+
 import static org.junit.Assert.assertEquals;
 
 public class TestCTASPartitionFilter extends PlanTestBase {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestCTASPartitionFilter.class);
 
-  static final String WORKING_PATH = TestTools.getWorkingPath();
-  static final String TEST_RES_PATH = WORKING_PATH + "/src/test/resources";
-
   private static void testExcludeFilter(String query, int expectedNumFiles,
       String excludedFilterPattern, int expectedRowCount) throws Exception {
     int actualRowCount = testSql(query);
@@ -47,9 +43,14 @@ public class TestCTASPartitionFilter extends PlanTestBase {
     testPlanMatchingPatterns(query, new String[]{numFilesPattern, includedFilterPattern}, new String[]{});
   }
 
+  @BeforeClass
+  public static void setupTestFiles() {
+    dirTestWatcher.copyResourceToRoot(Paths.get("multilevel"));
+  }
+
   @Test
   public void testDrill3965() throws Exception {
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
     test("create table orders_auto_partition partition by(o_orderpriority) as select * from cp.`tpch/orders.parquet`");
     test("explain plan for select count(*) from `orders_auto_partition/1_0_1.parquet` where o_orderpriority = '5-LOW'");
   }
@@ -58,49 +59,49 @@ public class TestCTASPartitionFilter extends PlanTestBase {
   public void withDistribution() throws Exception {
     test("alter session set `planner.slice_target` = 1");
     test("alter session set `store.partition.hash_distribute` = true");
-    test("use dfs_test.tmp");
-    test(String.format("create table orders_distribution partition by (o_orderpriority) as select * from dfs_test.`%s/multilevel/parquet`", TEST_RES_PATH));
+    test("use dfs.tmp");
+    test("create table orders_distribution partition by (o_orderpriority) as select * from dfs.`multilevel/parquet`");
     String query = "select * from orders_distribution where o_orderpriority = '1-URGENT'";
-    testExcludeFilter(query, 1, "Filter", 24);
+    testExcludeFilter(query, 1, "Filter\\(", 24);
   }
 
   @Test
   public void withoutDistribution() throws Exception {
     test("alter session set `planner.slice_target` = 1");
     test("alter session set `store.partition.hash_distribute` = false");
-    test("use dfs_test.tmp");
-    test(String.format("create table orders_no_distribution partition by (o_orderpriority) as select * from dfs_test.`%s/multilevel/parquet`", TEST_RES_PATH));
+    test("use dfs.tmp");
+    test("create table orders_no_distribution partition by (o_orderpriority) as select * from dfs.`multilevel/parquet`");
     String query = "select * from orders_no_distribution where o_orderpriority = '1-URGENT'";
-    testExcludeFilter(query, 2, "Filter", 24);
+    testExcludeFilter(query, 2, "Filter\\(", 24);
   }
 
   @Test
   public void testDRILL3410() throws Exception {
     test("alter session set `planner.slice_target` = 1");
     test("alter session set `store.partition.hash_distribute` = true");
-    test("use dfs_test.tmp");
-    test(String.format("create table drill_3410 partition by (o_orderpriority) as select * from dfs_test.`%s/multilevel/parquet`", TEST_RES_PATH));
+    test("use dfs.tmp");
+    test("create table drill_3410 partition by (o_orderpriority) as select * from dfs.`multilevel/parquet`");
     String query = "select * from drill_3410 where (o_orderpriority = '1-URGENT' and o_orderkey = 10) or (o_orderpriority = '2-HIGH' or o_orderkey = 11)";
-    testIncludeFilter(query, 1, "Filter", 34);
+    testIncludeFilter(query, 1, "Filter\\(", 34);
   }
 
   @Test
   public void testDRILL3414() throws Exception {
     test("alter session set `planner.slice_target` = 1");
     test("alter session set `store.partition.hash_distribute` = true");
-    test("use dfs_test.tmp");
-    test(String.format("create table drill_3414 partition by (x, y) as select dir0 as x, dir1 as y, columns from dfs_test.`%s/multilevel/csv`", TEST_RES_PATH));
+    test("use dfs.tmp");
+    test("create table drill_3414 partition by (x, y) as select dir0 as x, dir1 as y, columns from dfs.`multilevel/csv`");
     String query = ("select * from drill_3414 where (x=1994 or y='Q1') and (x=1995 or y='Q2' or columns[0] > 5000)");
-    testIncludeFilter(query, 6, "Filter", 20);
+    testIncludeFilter(query, 6, "Filter\\(", 20);
   }
 
   @Test
   public void testDRILL3414_2() throws Exception {
     test("alter session set `planner.slice_target` = 1");
     test("alter session set `store.partition.hash_distribute` = true");
-    test("use dfs_test.tmp");
-    test(String.format("create table drill_3414_2 partition by (x, y) as select dir0 as x, dir1 as y, columns from dfs_test.`%s/multilevel/csv`", TEST_RES_PATH));
+    test("use dfs.tmp");
+    test("create table drill_3414_2 partition by (x, y) as select dir0 as x, dir1 as y, columns from dfs.`multilevel/csv`");
     String query = ("select * from drill_3414_2 where (x=1994 or y='Q1') and (x=1995 or y='Q2' or columns[0] > 5000) or columns[0] < 3000");
-    testIncludeFilter(query, 1, "Filter", 120);
+    testIncludeFilter(query, 1, "Filter\\(", 120);
   }
 }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestCaseSensitivity.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestCaseSensitivity.java b/exec/java-exec/src/test/java/org/apache/drill/TestCaseSensitivity.java
index 0a157eb..1779078 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestCaseSensitivity.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestCaseSensitivity.java
@@ -20,6 +20,7 @@ package org.apache.drill;
 
 import org.apache.drill.categories.SqlTest;
 import org.apache.drill.categories.UnlikelyTest;
+import org.apache.drill.test.BaseTestQuery;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestDisabledFunctionality.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestDisabledFunctionality.java b/exec/java-exec/src/test/java/org/apache/drill/TestDisabledFunctionality.java
index cfd8fbd..4f8c16f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestDisabledFunctionality.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestDisabledFunctionality.java
@@ -18,17 +18,17 @@
 package org.apache.drill;
 import org.apache.drill.categories.UnlikelyTest;
 import org.apache.drill.common.exceptions.UserException;
-import org.apache.drill.common.util.FileUtils;
 import org.apache.drill.exec.work.ExecErrorConstants;
 import org.apache.drill.exec.work.foreman.SqlUnsupportedException;
 import org.apache.drill.exec.work.foreman.UnsupportedDataTypeException;
 import org.apache.drill.exec.work.foreman.UnsupportedFunctionException;
 import org.apache.drill.exec.work.foreman.UnsupportedRelOperatorException;
+import org.apache.drill.test.BaseTestQuery;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
 @Category(UnlikelyTest.class)
-public class TestDisabledFunctionality extends BaseTestQuery{
+public class TestDisabledFunctionality extends BaseTestQuery {
   static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestExampleQueries.class);
 
   @Test(expected = UserException.class)  // see DRILL-2054
@@ -263,12 +263,8 @@ public class TestDisabledFunctionality extends BaseTestQuery{
   @Test(expected = UnsupportedFunctionException.class) // see DRILL-2181
   public void testFlattenWithinGroupBy() throws Exception {
     try {
-      String root = FileUtils.getResourceAsFile("/store/text/sample.json").toURI().toString();
-      String query = String.format("select flatten(j.topping) tt " +
-          "from dfs_test.`%s` j " +
-          "group by flatten(j.topping)", root);
-
-      test(query);
+      test("select flatten(j.topping) tt " +
+        "from cp.`store/text/sample.json` j group by flatten(j.topping)");
     } catch(UserException ex) {
       throwAsUnsupportedException(ex);
       throw ex;
@@ -278,12 +274,9 @@ public class TestDisabledFunctionality extends BaseTestQuery{
   @Test(expected = UnsupportedFunctionException.class) // see DRILL-2181
   public void testFlattenWithinOrderBy() throws Exception {
     try {
-      String root = FileUtils.getResourceAsFile("/store/text/sample.json").toURI().toString();
-      String query = String.format("select flatten(j.topping) tt " +
-          "from dfs_test.`%s` j " +
-          "order by flatten(j.topping)", root);
-
-      test(query);
+      test("select flatten(j.topping) tt " +
+        "from cp.`store/text/sample.json` j " +
+        "order by flatten(j.topping)");
     } catch(UserException ex) {
       throwAsUnsupportedException(ex);
       throw ex;
@@ -293,11 +286,8 @@ public class TestDisabledFunctionality extends BaseTestQuery{
   @Test(expected = UnsupportedFunctionException.class) // see DRILL-2181
   public void testFlattenWithinAggFunction() throws Exception {
     try {
-      String root = FileUtils.getResourceAsFile("/store/text/sample.json").toURI().toString();
-      String query = String.format("select count(flatten(j.topping)) tt " +
-          "from dfs_test.`%s` j", root);
-
-      test(query);
+      test("select count(flatten(j.topping)) tt " +
+        "from cp.`store/text/sample.json` j");
     } catch(UserException ex) {
       throwAsUnsupportedException(ex);
       throw ex;
@@ -307,11 +297,8 @@ public class TestDisabledFunctionality extends BaseTestQuery{
   @Test(expected = UnsupportedFunctionException.class) // see DRILL-2181
   public void testFlattenWithinDistinct() throws Exception {
     try {
-      String root = FileUtils.getResourceAsFile("/store/text/sample.json").toURI().toString();
-      String query = String.format("select Distinct (flatten(j.topping)) tt " +
-          "from dfs_test.`%s` j", root);
-
-      test(query);
+      test("select Distinct (flatten(j.topping)) tt " +
+        "from cp.`store/text/sample.json` j");
     } catch(UserException ex) {
       throwAsUnsupportedException(ex);
       throw ex;
@@ -333,8 +320,7 @@ public class TestDisabledFunctionality extends BaseTestQuery{
   @Test (expected = UnsupportedFunctionException.class) //DRILL-3802
   public void testDisableRollup() throws Exception{
     try {
-      final String query = "select n_regionkey, count(*) as cnt from cp.`tpch/nation.parquet` group by rollup(n_regionkey, n_name)";
-      test(query);
+      test("select n_regionkey, count(*) as cnt from cp.`tpch/nation.parquet` group by rollup(n_regionkey, n_name)");
     } catch(UserException ex) {
       throwAsUnsupportedException(ex);
       throw ex;
@@ -344,8 +330,7 @@ public class TestDisabledFunctionality extends BaseTestQuery{
   @Test (expected = UnsupportedFunctionException.class) //DRILL-3802
   public void testDisableCube() throws Exception{
     try {
-      final String query = "select n_regionkey, count(*) as cnt from cp.`tpch/nation.parquet` group by cube(n_regionkey, n_name)";
-      test(query);
+      test("select n_regionkey, count(*) as cnt from cp.`tpch/nation.parquet` group by cube(n_regionkey, n_name)");
     } catch(UserException ex) {
       throwAsUnsupportedException(ex);
       throw ex;
@@ -355,8 +340,7 @@ public class TestDisabledFunctionality extends BaseTestQuery{
   @Test (expected = UnsupportedFunctionException.class) //DRILL-3802
   public void testDisableGroupingSets() throws Exception{
     try {
-      final String query = "select n_regionkey, count(*) as cnt from cp.`tpch/nation.parquet` group by grouping sets(n_regionkey, n_name)";
-      test(query);
+      test("select n_regionkey, count(*) as cnt from cp.`tpch/nation.parquet` group by grouping sets(n_regionkey, n_name)");
     } catch(UserException ex) {
       throwAsUnsupportedException(ex);
       throw ex;
@@ -366,8 +350,7 @@ public class TestDisabledFunctionality extends BaseTestQuery{
   @Test (expected = UnsupportedFunctionException.class) //DRILL-3802
   public void testDisableGrouping() throws Exception{
     try {
-      final String query = "select n_regionkey, count(*), GROUPING(n_regionkey) from cp.`tpch/nation.parquet` group by n_regionkey;";
-      test(query);
+      test("select n_regionkey, count(*), GROUPING(n_regionkey) from cp.`tpch/nation.parquet` group by n_regionkey;");
     } catch(UserException ex) {
       throwAsUnsupportedException(ex);
       throw ex;
@@ -377,8 +360,7 @@ public class TestDisabledFunctionality extends BaseTestQuery{
   @Test (expected = UnsupportedFunctionException.class) //DRILL-3802
   public void testDisableGrouping_ID() throws Exception{
     try {
-      final String query = "select n_regionkey, count(*), GROUPING_ID(n_regionkey) from cp.`tpch/nation.parquet` group by n_regionkey;";
-      test(query);
+      test("select n_regionkey, count(*), GROUPING_ID(n_regionkey) from cp.`tpch/nation.parquet` group by n_regionkey;");
     } catch(UserException ex) {
       throwAsUnsupportedException(ex);
       throw ex;
@@ -388,8 +370,7 @@ public class TestDisabledFunctionality extends BaseTestQuery{
   @Test (expected = UnsupportedFunctionException.class) //DRILL-3802
   public void testDisableGroup_ID() throws Exception{
     try {
-      final String query = "select n_regionkey, count(*), GROUP_ID() from cp.`tpch/nation.parquet` group by n_regionkey;";
-      test(query);
+      test("select n_regionkey, count(*), GROUP_ID() from cp.`tpch/nation.parquet` group by n_regionkey;");
     } catch(UserException ex) {
       throwAsUnsupportedException(ex);
       throw ex;
@@ -399,8 +380,7 @@ public class TestDisabledFunctionality extends BaseTestQuery{
   @Test (expected = UnsupportedFunctionException.class) //DRILL-3802
   public void testDisableGroupingInFilter() throws Exception{
     try {
-      final String query = "select n_regionkey, count(*) from cp.`tpch/nation.parquet` group by n_regionkey HAVING GROUPING(n_regionkey) = 1";
-      test(query);
+      test("select n_regionkey, count(*) from cp.`tpch/nation.parquet` group by n_regionkey HAVING GROUPING(n_regionkey) = 1");
     } catch(UserException ex) {
       throwAsUnsupportedException(ex);
       throw ex;

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestDropTable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestDropTable.java b/exec/java-exec/src/test/java/org/apache/drill/TestDropTable.java
index f270d1e..052b761 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestDropTable.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestDropTable.java
@@ -37,17 +37,16 @@ public class TestDropTable extends PlanTestBase {
 
   @Test
   public void testDropJsonTable() throws Exception {
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
     test("alter session set `store.format` = 'json'");
 
     final String tableName = "simple_json";
     // create a json table
-    test(String.format(CREATE_SIMPLE_TABLE, tableName));
+    test(CREATE_SIMPLE_TABLE, tableName);
 
     // drop the table
-    final String dropSql = String.format(DROP_TABLE, tableName);
     testBuilder()
-        .sqlQuery(dropSql)
+        .sqlQuery(DROP_TABLE, tableName)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(true, String.format("Table [%s] dropped", tableName))
@@ -56,16 +55,15 @@ public class TestDropTable extends PlanTestBase {
 
   @Test
   public void testDropParquetTable() throws Exception {
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
     final String tableName = "simple_json";
 
     // create a parquet table
-    test(String.format(CREATE_SIMPLE_TABLE, tableName));
+    test(CREATE_SIMPLE_TABLE, tableName);
 
     // drop the table
-    final String dropSql = String.format(DROP_TABLE, tableName);
     testBuilder()
-        .sqlQuery(dropSql)
+        .sqlQuery(DROP_TABLE, tableName)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(true, String.format("Table [%s] dropped", tableName))
@@ -74,18 +72,16 @@ public class TestDropTable extends PlanTestBase {
 
   @Test
   public void testDropTextTable() throws Exception {
-    test("use dfs_test.tmp");
-
+    test("use dfs.tmp");
     test("alter session set `store.format` = 'csv'");
     final String csvTable = "simple_csv";
 
     // create a csv table
-    test(String.format(CREATE_SIMPLE_TABLE, csvTable));
+    test(CREATE_SIMPLE_TABLE, csvTable);
 
     // drop the table
-    String dropSql = String.format(DROP_TABLE, csvTable);
     testBuilder()
-        .sqlQuery(dropSql)
+        .sqlQuery(DROP_TABLE, csvTable)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(true, String.format("Table [%s] dropped", csvTable))
@@ -95,12 +91,11 @@ public class TestDropTable extends PlanTestBase {
     final String psvTable = "simple_psv";
 
     // create a psv table
-    test(String.format(CREATE_SIMPLE_TABLE, psvTable));
+    test(CREATE_SIMPLE_TABLE, psvTable);
 
     // drop the table
-    dropSql = String.format(DROP_TABLE, psvTable);
     testBuilder()
-        .sqlQuery(dropSql)
+        .sqlQuery(DROP_TABLE, psvTable)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(true, String.format("Table [%s] dropped", psvTable))
@@ -110,12 +105,11 @@ public class TestDropTable extends PlanTestBase {
     final String tsvTable = "simple_tsv";
 
     // create a tsv table
-    test(String.format(CREATE_SIMPLE_TABLE, tsvTable));
+    test(CREATE_SIMPLE_TABLE, tsvTable);
 
     // drop the table
-    dropSql = String.format(DROP_TABLE, tsvTable);
     testBuilder()
-        .sqlQuery(dropSql)
+        .sqlQuery(DROP_TABLE, tsvTable)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(true, String.format("Table [%s] dropped", tsvTable))
@@ -124,7 +118,7 @@ public class TestDropTable extends PlanTestBase {
 
   @Test
   public void testNonHomogenousDrop() throws Exception {
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
     final String tableName = "homogenous_table";
 
     // create a parquet table
@@ -133,14 +127,14 @@ public class TestDropTable extends PlanTestBase {
     // create a json table within the same directory
     test("alter session set `store.format` = 'json'");
     final String nestedJsonTable = tableName + Path.SEPARATOR + "json_table";
-    test(String.format(CREATE_SIMPLE_TABLE, BACK_TICK + nestedJsonTable + BACK_TICK));
+    test(CREATE_SIMPLE_TABLE, BACK_TICK + nestedJsonTable + BACK_TICK);
 
     test("show files from " + tableName);
 
     boolean dropFailed = false;
     // this should fail, because the directory contains non-homogenous files
     try {
-      test(String.format(DROP_TABLE, tableName));
+      test(DROP_TABLE, tableName);
     } catch (UserException e) {
       Assert.assertTrue(e.getMessage().contains("VALIDATION ERROR"));
       dropFailed = true;
@@ -150,7 +144,7 @@ public class TestDropTable extends PlanTestBase {
 
     // drop the individual json table
     testBuilder()
-        .sqlQuery(String.format(DROP_TABLE, BACK_TICK + nestedJsonTable + BACK_TICK))
+        .sqlQuery(DROP_TABLE, BACK_TICK + nestedJsonTable + BACK_TICK)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(true, String.format("Table [%s] dropped", nestedJsonTable))
@@ -158,7 +152,7 @@ public class TestDropTable extends PlanTestBase {
 
     // Now drop should succeed
     testBuilder()
-        .sqlQuery(String.format(DROP_TABLE, tableName))
+        .sqlQuery(DROP_TABLE, tableName)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(true, String.format("Table [%s] dropped", tableName))
@@ -169,7 +163,7 @@ public class TestDropTable extends PlanTestBase {
   public void testDropOnImmutableSchema() throws Exception {
     boolean dropFailed = false;
     try {
-      test("drop table dfs.`/tmp`");
+      test("drop table dfs.`tmp`");
     } catch (UserException e) {
       Assert.assertTrue(e.getMessage().contains("VALIDATION ERROR"));
       dropFailed = true;
@@ -182,12 +176,12 @@ public class TestDropTable extends PlanTestBase {
   @Category(UnlikelyTest.class)
   public void testDropTableIfExistsWhileTableExists() throws Exception {
     final String existentTableName = "test_table_exists";
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
 
     // successful dropping of existent table
-    test(String.format(CREATE_SIMPLE_TABLE, existentTableName));
+    test(CREATE_SIMPLE_TABLE, existentTableName);
     testBuilder()
-        .sqlQuery(String.format(DROP_TABLE_IF_EXISTS, existentTableName))
+        .sqlQuery(DROP_TABLE_IF_EXISTS, existentTableName)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(true, String.format("Table [%s] dropped", existentTableName))
@@ -198,11 +192,11 @@ public class TestDropTable extends PlanTestBase {
   @Category(UnlikelyTest.class)
   public void testDropTableIfExistsWhileTableDoesNotExist() throws Exception {
     final String nonExistentTableName = "test_table_not_exists";
-    test("use dfs_test.tmp");
+    test("use dfs.tmp");
 
     // dropping of non existent table without error
     testBuilder()
-        .sqlQuery(String.format(DROP_TABLE_IF_EXISTS, nonExistentTableName))
+        .sqlQuery(DROP_TABLE_IF_EXISTS, nonExistentTableName)
         .unOrdered()
         .baselineColumns("ok", "summary")
         .baselineValues(false, String.format("Table [%s] not found", nonExistentTableName))
@@ -214,18 +208,18 @@ public class TestDropTable extends PlanTestBase {
   public void testDropTableIfExistsWhileItIsAView() throws Exception {
     final String viewName = "test_view";
     try{
-      test("use dfs_test.tmp");
+      test("use dfs.tmp");
 
       // dropping of non existent table without error if the view with such name is existed
-      test(String.format(CREATE_SIMPLE_VIEW, viewName));
+      test(CREATE_SIMPLE_VIEW, viewName);
       testBuilder()
-          .sqlQuery(String.format(DROP_TABLE_IF_EXISTS, viewName))
+          .sqlQuery(DROP_TABLE_IF_EXISTS, viewName)
           .unOrdered()
           .baselineColumns("ok", "summary")
           .baselineValues(false, String.format("Table [%s] not found", viewName))
           .go();
     } finally {
-      test(String.format(DROP_VIEW_IF_EXISTS, viewName));
+      test(DROP_VIEW_IF_EXISTS, viewName);
     }
   }
 }

http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java b/exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java
index f147f21..7fea9e1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/TestDynamicUDFSupport.java
@@ -19,12 +19,14 @@ package org.apache.drill;
 
 import com.google.common.collect.Lists;
 import mockit.Deencapsulation;
+import org.apache.commons.io.FileUtils;
+import org.apache.commons.lang3.ArrayUtils;
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.common.config.CommonConstants;
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.UserRemoteException;
-import org.apache.drill.common.util.TestTools;
+import org.apache.drill.test.TestTools;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.exception.VersionMismatchException;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
@@ -35,24 +37,30 @@ import org.apache.drill.exec.proto.UserBitShared.Registry;
 import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.store.sys.store.DataChangeVersion;
 import org.apache.drill.exec.util.JarUtil;
+import org.apache.drill.test.BaseTestQuery;
+import org.apache.drill.test.TestBuilder;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
-import org.junit.rules.TemporaryFolder;
+import org.junit.rules.TestWatcher;
+import org.junit.runner.Description;
 import org.junit.runner.RunWith;
 import org.mockito.invocation.InvocationOnMock;
 import org.mockito.runners.MockitoJUnitRunner;
 import org.mockito.stubbing.Answer;
 
+import java.io.File;
 import java.io.IOException;
+import java.net.URI;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.List;
 import java.util.Properties;
 import java.util.concurrent.CountDownLatch;
 
+import static org.apache.drill.test.HadoopUtils.hadoopToJavaPath;
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
@@ -72,29 +80,52 @@ import static org.mockito.Mockito.verify;
 @Category({SlowTest.class, SqlFunctionTest.class})
 public class TestDynamicUDFSupport extends BaseTestQuery {
 
-  private static final Path jars = new Path(TestTools.getWorkingPath(), "src/test/resources/jars");
+  private static final Path jars = TestTools.WORKING_PATH
+    .resolve(TestTools.TEST_RESOURCES)
+    .resolve("jars");
   private static final String default_binary_name = "DrillUDF-1.0.jar";
+  private static final String UDF_SUB_DIR = "udf";
   private static final String default_source_name = JarUtil.getSourceName(default_binary_name);
-
-  @Rule
-  public final TemporaryFolder base = new TemporaryFolder();
-
-  private static FileSystem localFileSystem;
+  private static URI fsUri;
+  private static File udfDir;
 
   @BeforeClass
-  public static void init() throws IOException {
-    localFileSystem = getLocalFileSystem();
-  }
+  public static void setup() throws IOException {
+    udfDir = dirTestWatcher.makeSubDir(Paths.get(UDF_SUB_DIR));
 
-  @Before
-  public void setup() {
     Properties overrideProps = new Properties();
-    overrideProps.setProperty(ExecConstants.UDF_DIRECTORY_ROOT, base.getRoot().getPath());
-    overrideProps.setProperty(ExecConstants.DRILL_TMP_DIR, base.getRoot().getPath());
+    overrideProps.setProperty(ExecConstants.UDF_DIRECTORY_ROOT, udfDir.getAbsolutePath());
     overrideProps.setProperty(ExecConstants.UDF_DIRECTORY_FS, FileSystem.DEFAULT_FS);
     updateTestCluster(1, DrillConfig.create(overrideProps));
+
+    fsUri = getLocalFileSystem().getUri();
   }
 
+  @Rule
+  public final TestWatcher clearDirs = new TestWatcher() {
+    @Override
+    protected void succeeded(Description description) {
+      reset();
+    }
+
+    @Override
+    protected void failed(Throwable e, Description description) {
+      reset();
+    }
+
+    private void reset() {
+      try {
+        closeClient();
+        FileUtils.cleanDirectory(udfDir);
+        dirTestWatcher.clear();
+        setupDefaultTestCluster();
+        setup();
+      } catch (Exception e) {
+        throw new RuntimeException(e);
+      }
+    }
+  };
+
   @Test
   public void testSyntax() throws Exception {
     test("create function using jar 'jar_name.jar'");
@@ -132,11 +163,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
   @Test
   public void testAbsentBinaryInStaging() throws Exception {
-    Path staging = getDrillbitContext().getRemoteFunctionRegistry().getStagingArea();
-    FileSystem fs = getDrillbitContext().getRemoteFunctionRegistry().getFs();
+    final Path staging = hadoopToJavaPath(getDrillbitContext().getRemoteFunctionRegistry().getStagingArea());
 
     String summary = String.format("File %s does not exist on file system %s",
-        new Path(staging, default_binary_name).toUri().getPath(), fs.getUri());
+        staging.resolve(default_binary_name).toUri().getPath(), fsUri);
 
     testBuilder()
         .sqlQuery("create function using jar '%s'", default_binary_name)
@@ -148,12 +178,12 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
   @Test
   public void testAbsentSourceInStaging() throws Exception {
-    Path staging = getDrillbitContext().getRemoteFunctionRegistry().getStagingArea();
-    FileSystem fs = getDrillbitContext().getRemoteFunctionRegistry().getFs();
-    copyJar(fs, jars, staging, default_binary_name);
+    final Path staging = hadoopToJavaPath(getDrillbitContext().getRemoteFunctionRegistry().getStagingArea());
+
+    copyJar(jars, staging, default_binary_name);
 
     String summary = String.format("File %s does not exist on file system %s",
-        new Path(staging, default_source_name).toUri().getPath(), fs.getUri());
+        staging.resolve(default_source_name).toUri().getPath(), fsUri);
 
     testBuilder()
         .sqlQuery("create function using jar '%s'", default_binary_name)
@@ -214,10 +244,12 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     assertFalse("Staging area should be empty", fs.listFiles(remoteFunctionRegistry.getStagingArea(), false).hasNext());
     assertFalse("Temporary area should be empty", fs.listFiles(remoteFunctionRegistry.getTmpArea(), false).hasNext());
 
+    final Path path = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+
     assertTrue("Binary should be present in registry area",
-        fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_binary_name)));
+      path.resolve(default_binary_name).toFile().exists());
     assertTrue("Source should be present in registry area",
-        fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_source_name)));
+      path.resolve(default_source_name).toFile().exists());
 
     Registry registry = remoteFunctionRegistry.getRegistry(new DataChangeVersion());
     assertEquals("Registry should contain one jar", registry.getJarList().size(), 1);
@@ -243,8 +275,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
   @Test
   public void testDuplicatedJarInLocalRegistry() throws Exception {
     copyDefaultJarsToStagingArea();
+
     test("create function using jar '%s'", default_binary_name);
     test("select custom_lower('A') from (values(1))");
+
     copyDefaultJarsToStagingArea();
 
     String summary = "Jar with %s name has been already registered";
@@ -291,7 +325,11 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
   @Test
   public void testSuccessfulRegistrationAfterSeveralRetryAttempts() throws Exception {
-    RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
+    final RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
+    final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+    final Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
+    final Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
+
     copyDefaultJarsToStagingArea();
 
     doThrow(new VersionMismatchException("Version mismatch detected", 1))
@@ -312,15 +350,13 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     verify(remoteFunctionRegistry, times(3))
             .updateRegistry(any(Registry.class), any(DataChangeVersion.class));
 
-    FileSystem fs = remoteFunctionRegistry.getFs();
-
-    assertFalse("Staging area should be empty", fs.listFiles(remoteFunctionRegistry.getStagingArea(), false).hasNext());
-    assertFalse("Temporary area should be empty", fs.listFiles(remoteFunctionRegistry.getTmpArea(), false).hasNext());
+    assertTrue("Staging area should be empty", ArrayUtils.isEmpty(stagingPath.toFile().listFiles()));
+    assertTrue("Temporary area should be empty", ArrayUtils.isEmpty(tmpPath.toFile().listFiles()));
 
     assertTrue("Binary should be present in registry area",
-            fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_binary_name)));
+      registryPath.resolve(default_binary_name).toFile().exists());
     assertTrue("Source should be present in registry area",
-            fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_source_name)));
+      registryPath.resolve(default_source_name).toFile().exists());
 
     Registry registry = remoteFunctionRegistry.getRegistry(new DataChangeVersion());
     assertEquals("Registry should contain one jar", registry.getJarList().size(), 1);
@@ -361,7 +397,11 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
   @Test
   public void testExceedRetryAttemptsDuringRegistration() throws Exception {
-    RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
+    final RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
+    final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+    final Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
+    final Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
+
     copyDefaultJarsToStagingArea();
 
     doThrow(new VersionMismatchException("Version mismatch detected", 1))
@@ -379,17 +419,13 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     verify(remoteFunctionRegistry, times(remoteFunctionRegistry.getRetryAttempts() + 1))
         .updateRegistry(any(Registry.class), any(DataChangeVersion.class));
 
-    FileSystem fs = remoteFunctionRegistry.getFs();
-
     assertTrue("Binary should be present in staging area",
-            fs.exists(new Path(remoteFunctionRegistry.getStagingArea(), default_binary_name)));
+            stagingPath.resolve(default_binary_name).toFile().exists());
     assertTrue("Source should be present in staging area",
-            fs.exists(new Path(remoteFunctionRegistry.getStagingArea(), default_source_name)));
+            stagingPath.resolve(default_source_name).toFile().exists());
 
-    assertFalse("Registry area should be empty",
-        fs.listFiles(remoteFunctionRegistry.getRegistryArea(), false).hasNext());
-    assertFalse("Temporary area should be empty",
-        fs.listFiles(remoteFunctionRegistry.getTmpArea(), false).hasNext());
+    assertTrue("Registry area should be empty", ArrayUtils.isEmpty(registryPath.toFile().listFiles()));
+    assertTrue("Temporary area should be empty", ArrayUtils.isEmpty(tmpPath.toFile().listFiles()));
 
     assertEquals("Registry should be empty",
         remoteFunctionRegistry.getRegistry(new DataChangeVersion()).getJarList().size(), 0);
@@ -397,7 +433,9 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
   @Test
   public void testExceedRetryAttemptsDuringUnregistration() throws Exception {
-    RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
+    final RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
+    final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+
     copyDefaultJarsToStagingArea();
     test("create function using jar '%s'", default_binary_name);
 
@@ -417,12 +455,10 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     verify(remoteFunctionRegistry, times(remoteFunctionRegistry.getRetryAttempts() + 1))
         .updateRegistry(any(Registry.class), any(DataChangeVersion.class));
 
-    FileSystem fs = remoteFunctionRegistry.getFs();
-
     assertTrue("Binary should be present in registry area",
-            fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_binary_name)));
+      registryPath.resolve(default_binary_name).toFile().exists());
     assertTrue("Source should be present in registry area",
-            fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_source_name)));
+      registryPath.resolve(default_source_name).toFile().exists());
 
     Registry registry = remoteFunctionRegistry.getRegistry(new DataChangeVersion());
     assertEquals("Registry should contain one jar", registry.getJarList().size(), 1);
@@ -446,13 +482,13 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
         .baselineValues("a")
         .go();
 
-    Path localUdfDirPath = Deencapsulation.getField(
-        getDrillbitContext().getFunctionImplementationRegistry(), "localUdfDir");
+    Path localUdfDirPath = hadoopToJavaPath((org.apache.hadoop.fs.Path) Deencapsulation.getField(
+        getDrillbitContext().getFunctionImplementationRegistry(), "localUdfDir"));
 
     assertTrue("Binary should exist in local udf directory",
-        localFileSystem.exists(new Path(localUdfDirPath, default_binary_name)));
+      localUdfDirPath.resolve(default_binary_name).toFile().exists());
     assertTrue("Source should exist in local udf directory",
-        localFileSystem.exists(new Path(localUdfDirPath, default_source_name)));
+      localUdfDirPath.resolve(default_source_name).toFile().exists());
   }
 
   @Test
@@ -513,13 +549,13 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
     test("create function using jar '%s'", default_binary_name);
     test("select custom_lower('A') from (values(1))");
 
-    Path localUdfDirPath = Deencapsulation.getField(
-        getDrillbitContext().getFunctionImplementationRegistry(), "localUdfDir");
+    Path localUdfDirPath = hadoopToJavaPath((org.apache.hadoop.fs.Path)Deencapsulation.getField(
+      getDrillbitContext().getFunctionImplementationRegistry(), "localUdfDir"));
 
     assertTrue("Binary should exist in local udf directory",
-        localFileSystem.exists(new Path(localUdfDirPath, default_binary_name)));
+      localUdfDirPath.resolve(default_binary_name).toFile().exists());
     assertTrue("Source should exist in local udf directory",
-        localFileSystem.exists(new Path(localUdfDirPath, default_source_name)));
+      localUdfDirPath.resolve(default_source_name).toFile().exists());
 
     String summary = "The following UDFs in jar %s have been unregistered:\n" +
         "[custom_lower(VARCHAR-REQUIRED)]";
@@ -537,20 +573,21 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
       assertThat(e.getMessage(), containsString("No match found for function signature custom_lower(<CHARACTER>)"));
     }
 
-    RemoteFunctionRegistry remoteFunctionRegistry = getDrillbitContext().getRemoteFunctionRegistry();
+    final RemoteFunctionRegistry remoteFunctionRegistry = getDrillbitContext().getRemoteFunctionRegistry();
+    final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+
     assertEquals("Remote registry should be empty",
         remoteFunctionRegistry.getRegistry(new DataChangeVersion()).getJarList().size(), 0);
 
-    FileSystem fs = remoteFunctionRegistry.getFs();
     assertFalse("Binary should not be present in registry area",
-        fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_binary_name)));
+      registryPath.resolve(default_binary_name).toFile().exists());
     assertFalse("Source should not be present in registry area",
-        fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_source_name)));
+      registryPath.resolve(default_source_name).toFile().exists());
 
     assertFalse("Binary should not be present in local udf directory",
-        localFileSystem.exists(new Path(localUdfDirPath, default_binary_name)));
+      localUdfDirPath.resolve(default_binary_name).toFile().exists());
     assertFalse("Source should not be present in local udf directory",
-        localFileSystem.exists(new Path(localUdfDirPath, default_source_name)));
+      localUdfDirPath.resolve(default_source_name).toFile().exists());
   }
 
   @Test
@@ -567,7 +604,7 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
     Thread.sleep(1000);
 
-    Path src = new Path(jars, "v2");
+    Path src = jars.resolve("v2");
     copyJarsToStagingArea(src, default_binary_name, default_source_name);
     test("create function using jar '%s'", default_binary_name);
     testBuilder()
@@ -593,15 +630,18 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
   @Test
   public void testRegistrationFailDuringRegistryUpdate() throws Exception {
     final RemoteFunctionRegistry remoteFunctionRegistry = spyRemoteFunctionRegistry();
-    final FileSystem fs = remoteFunctionRegistry.getFs();
+    final Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
+    final Path stagingPath = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
+    final Path tmpPath = hadoopToJavaPath(remoteFunctionRegistry.getTmpArea());
+
     final String errorMessage = "Failure during remote registry update.";
     doAnswer(new Answer<Void>() {
       @Override
       public Void answer(InvocationOnMock invocation) throws Throwable {
         assertTrue("Binary should be present in registry area",
-            fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_binary_name)));
+            registryPath.resolve(default_binary_name).toFile().exists());
         assertTrue("Source should be present in registry area",
-            fs.exists(new Path(remoteFunctionRegistry.getRegistryArea(), default_source_name)));
+            registryPath.resolve(default_source_name).toFile().exists());
         throw new RuntimeException(errorMessage);
       }
     }).when(remoteFunctionRegistry).updateRegistry(any(Registry.class), any(DataChangeVersion.class));
@@ -615,15 +655,11 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
         .baselineValues(false, errorMessage)
         .go();
 
-    assertFalse("Registry area should be empty",
-        fs.listFiles(remoteFunctionRegistry.getRegistryArea(), false).hasNext());
-    assertFalse("Temporary area should be empty",
-        fs.listFiles(remoteFunctionRegistry.getTmpArea(), false).hasNext());
+    assertTrue("Registry area should be empty", ArrayUtils.isEmpty(registryPath.toFile().listFiles()));
+    assertTrue("Temporary area should be empty", ArrayUtils.isEmpty(tmpPath.toFile().listFiles()));
 
-    assertTrue("Binary should be present in staging area",
-        fs.exists(new Path(remoteFunctionRegistry.getStagingArea(), default_binary_name)));
-    assertTrue("Source should be present in staging area",
-        fs.exists(new Path(remoteFunctionRegistry.getStagingArea(), default_source_name)));
+    assertTrue("Binary should be present in staging area", stagingPath.resolve(default_binary_name).toFile().exists());
+    assertTrue("Source should be present in staging area", stagingPath.resolve(default_source_name).toFile().exists());
   }
 
   @Test
@@ -914,13 +950,17 @@ public class TestDynamicUDFSupport extends BaseTestQuery {
 
   private void copyJarsToStagingArea(Path src, String binaryName, String sourceName) throws IOException {
     RemoteFunctionRegistry remoteFunctionRegistry = getDrillbitContext().getRemoteFunctionRegistry();
-    copyJar(remoteFunctionRegistry.getFs(), src, remoteFunctionRegistry.getStagingArea(), binaryName);
-    copyJar(remoteFunctionRegistry.getFs(), src, remoteFunctionRegistry.getStagingArea(), sourceName);
+
+    final Path path = hadoopToJavaPath(remoteFunctionRegistry.getStagingArea());
+
+    copyJar(src, path, binaryName);
+    copyJar(src, path, sourceName);
   }
 
-  private void copyJar(FileSystem fs, Path src, Path dest, String name) throws IOException {
-    Path jarPath = new Path(src, name);
-    fs.copyFromLocalFile(jarPath, dest);
+  private void copyJar(Path src, Path dest, String name) throws IOException {
+    final File destFile = dest.resolve(name).toFile();
+    FileUtils.deleteQuietly(destFile);
+    FileUtils.copyFile(src.resolve(name).toFile(), destFile);
   }
 
   private RemoteFunctionRegistry spyRemoteFunctionRegistry() {


Mime
View raw message