http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/testing/TestResourceLeak.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/testing/TestResourceLeak.java b/exec/java-exec/src/test/java/org/apache/drill/exec/testing/TestResourceLeak.java
index d2f2590..94c8ebf 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/testing/TestResourceLeak.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/testing/TestResourceLeak.java
@@ -26,7 +26,7 @@ import java.util.Properties;
import javax.inject.Inject;
-import org.apache.drill.QueryTestUtil;
+import org.apache.drill.test.QueryTestUtil;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.common.exceptions.UserRemoteException;
import org.apache.drill.exec.ExecConstants;
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/TestEmptyPopulation.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/TestEmptyPopulation.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/TestEmptyPopulation.java
index 5bb6815..8427d49 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/TestEmptyPopulation.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/TestEmptyPopulation.java
@@ -17,10 +17,10 @@
*/
package org.apache.drill.exec.vector.complex;
-import static org.apache.drill.TestBuilder.listOf;
-import static org.apache.drill.TestBuilder.mapOf;
+import static org.apache.drill.test.TestBuilder.listOf;
+import static org.apache.drill.test.TestBuilder.mapOf;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.common.config.DrillConfig;
import org.apache.drill.exec.memory.BufferAllocator;
import org.apache.drill.exec.memory.RootAllocatorFactory;
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/fn/TestJsonReaderWithSparseFiles.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/fn/TestJsonReaderWithSparseFiles.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/fn/TestJsonReaderWithSparseFiles.java
index 808d98c..fafabeb 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/fn/TestJsonReaderWithSparseFiles.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/fn/TestJsonReaderWithSparseFiles.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
import java.util.List;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.exec.record.RecordBatchLoader;
import org.apache.drill.exec.rpc.user.QueryDataBatch;
import org.apache.drill.exec.util.JsonStringArrayList;
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexToJson.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexToJson.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexToJson.java
index 656429e..db3907d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexToJson.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexToJson.java
@@ -22,12 +22,13 @@ import static org.junit.Assert.assertTrue;
import java.util.List;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.common.types.TypeProtos.DataMode;
import org.apache.drill.exec.client.DrillClient;
import org.apache.drill.exec.proto.UserBitShared.RecordBatchDef;
import org.apache.drill.exec.record.RecordBatchLoader;
import org.apache.drill.exec.rpc.user.QueryDataBatch;
+import org.junit.BeforeClass;
import org.junit.Test;
public class TestComplexToJson extends BaseTestQuery {
@@ -42,7 +43,7 @@ public class TestComplexToJson extends BaseTestQuery {
client = new DrillClient(config, serviceSet.getCoordinator());
client.setSupportComplexTypes(false);
client.connect();
- results = testSqlWithResults("select * from dfs_test.`[WORKING_PATH]/src/test/resources/store/text/data/regions.csv`");
+ results = testSqlWithResults("select * from cp.`store/text/data/regions.csv`");
loader.load(results.get(0).getHeader().getDef(), results.get(0).getData());
RecordBatchDef def = results.get(0).getHeader().getDef();
// the entire row is returned as a single column
@@ -58,7 +59,7 @@ public class TestComplexToJson extends BaseTestQuery {
client = new DrillClient(config, serviceSet.getCoordinator());
client.setSupportComplexTypes(true);
client.connect();
- results = testSqlWithResults("select * from dfs_test.`[WORKING_PATH]/src/test/resources/store/text/data/regions.csv`");
+ results = testSqlWithResults("select * from cp.`store/text/data/regions.csv`");
loader.load(results.get(0).getHeader().getDef(), results.get(0).getData());
def = results.get(0).getHeader().getDef();
// the entire row is returned as a single column
@@ -73,5 +74,4 @@ public class TestComplexToJson extends BaseTestQuery {
client = parent_client;
}
-
}
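
The two query rewrites above capture the core migration in this commit: absolute `[WORKING_PATH]`-based `dfs_test` paths are replaced by the `cp` (classpath) storage plugin, so test data is resolved from the test jar instead of the working directory. A minimal sketch of the resulting idiom, using only the `BaseTestQuery` API visible in this commit (class and test names are hypothetical):

import org.apache.drill.test.BaseTestQuery;
import org.junit.Test;

public class ExampleClasspathQueryTest extends BaseTestQuery {
  @Test
  public void readsFromClasspath() throws Exception {
    // regions.csv lives under src/test/resources and is resolved from the
    // classpath; no working-directory substitution is required.
    test("select * from cp.`store/text/data/regions.csv`");
  }
}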
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeReader.java
index 67b2e5a..5badbdb 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeReader.java
@@ -18,8 +18,8 @@
package org.apache.drill.exec.vector.complex.writer;
-import org.apache.drill.BaseTestQuery;
-import org.apache.drill.TestBuilder;
+import org.apache.drill.test.BaseTestQuery;
+import org.apache.drill.test.TestBuilder;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
@@ -30,13 +30,10 @@ import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Path;
-import java.nio.file.Paths;
-import static org.apache.drill.TestBuilder.listOf;
-
-public class TestComplexTypeReader extends BaseTestQuery{
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestComplexTypeReader.class);
+import static org.apache.drill.test.TestBuilder.listOf;
+public class TestComplexTypeReader extends BaseTestQuery {
@BeforeClass
public static void init() throws Exception {
testNoResult("alter session set `exec.enable_union_type` = true");
@@ -243,13 +240,11 @@ public class TestComplexTypeReader extends BaseTestQuery{
@Test //DRILL-2872.
public void testRepeatedJson() throws Exception {
-
- final String query="select cast(convert_to(interests, 'JSON') as varchar(0)) as interests from cp.`complex_student.json`";
testBuilder()
- .sqlQuery(query)
- .unOrdered()
- .jsonBaselineFile("DRILL-2872-result.json")
- .go();
+ .sqlQuery("select cast(convert_to(interests, 'JSON') as varchar(0)) as interests from cp.`complex_student.json`")
+ .unOrdered()
+ .jsonBaselineFile("DRILL-2872-result.json")
+ .go();
}
@Test // DRILL-4410
@@ -258,19 +253,18 @@ public class TestComplexTypeReader extends BaseTestQuery{
long numRecords = 100000;
- String tempDir = BaseTestQuery.getTempDir("ComplexTypeWriter");
- String file1 = tempDir + TestComplexTypeReader.class.getName() + "arrays1.json";
- String file2 = tempDir + TestComplexTypeReader.class.getName() + "arrays2.json";
- Path path1 = Paths.get(file1);
- Path path2 = Paths.get(file2);
+ final String file1 = "arrays1.json";
+ final String file2 = "arrays2.json";
+
+ final Path path1 = dirTestWatcher.getRootDir().toPath().resolve(file1);
+ final Path path2 = dirTestWatcher.getRootDir().toPath().resolve(file2);
- String arrayString = "[ \"abcdef\", \"ghijkl\", \"mnopqr\", \"stuvwx\", \"yz1234\", \"567890\" ] ";
- Files.deleteIfExists(path1);
- Files.deleteIfExists(path2);
Files.createFile(path1);
Files.createFile(path2);
- try(PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(file1, true)))) {
+ final String arrayString = "[ \"abcdef\", \"ghijkl\", \"mnopqr\", \"stuvwx\", \"yz1234\", \"567890\" ] ";
+
+ try(PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(path1.toFile(), true)))) {
for (long i = 0; i < numRecords; i++) {
out.println("{ \"id\" : " + i + ", \"array\" : " + arrayString + "}");
}
@@ -278,7 +272,7 @@ public class TestComplexTypeReader extends BaseTestQuery{
throw e;
}
- try(PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(file2, true)))) {
+ try(PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(path2.toFile(), true)))) {
for (long i = 0; i < numRecords; i++) {
out.println("{ \"id\" : " + i + ", \"array\" : " + arrayString + "}");
}
@@ -286,27 +280,24 @@ public class TestComplexTypeReader extends BaseTestQuery{
throw e;
}
- String queryString = "select * from dfs.`" + file1 + "` `arrays1` INNER JOIN dfs.`" + file2 + "` `arrays2` ON "
- + "(`arrays1`.id = `arrays2`.id)";
- TestBuilder testBuilder = testBuilder().sqlQuery(queryString).unOrdered();
- testBuilder.baselineColumns("id", "id0", "array", "array0");
+ TestBuilder testBuilder = testBuilder()
+ .sqlQuery("select * from dfs.`%s` `arrays1` INNER JOIN dfs.`%s` `arrays2` ON "
+ + "(`arrays1`.id = `arrays2`.id)", file1, file2)
+ .unOrdered()
+ .baselineColumns("id", "id0", "array", "array0");
+
for (long i = 0; i < numRecords; i++) {
testBuilder.baselineValues(i, i, listOf("abcdef", "ghijkl", "mnopqr", "stuvwx", "yz1234", "567890"),
listOf("abcdef", "ghijkl", "mnopqr", "stuvwx", "yz1234", "567890"));
}
testBuilder.go();
-
- Files.deleteIfExists(path1);
- Files.deleteIfExists(path2);
}
@Test
public void testNonExistentFieldConverting() throws Exception {
- String query = "select employee_id, convert_to(`complex_field`, 'JSON') as complex_field from cp.`employee.json` " +
- "where employee_id = 1";
-
testBuilder()
- .sqlQuery(query)
+ .sqlQuery("select employee_id, convert_to(`complex_field`, 'JSON') as complex_field from cp.`employee.json` " +
+ "where employee_id = 1")
.unOrdered()
.baselineColumns("employee_id", "complex_field")
.baselineValues(1L, null)
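
The DRILL-4410 rewrite above shows the other half of the migration: files generated at test time are written into `dirTestWatcher.getRootDir()`, the watcher-managed directory backing the `dfs` workspace, and then queried by relative path. A sketch of that pattern under the same assumptions (class and file names hypothetical):

import java.io.File;
import java.io.PrintWriter;
import org.apache.drill.test.BaseTestQuery;
import org.junit.Test;

public class ExampleGeneratedFileTest extends BaseTestQuery {
  @Test
  public void queriesGeneratedJson() throws Exception {
    // Write a data file into the temp directory mapped to dfs.root...
    File dataFile = new File(dirTestWatcher.getRootDir(), "rows.json");
    try (PrintWriter out = new PrintWriter(dataFile)) {
      out.println("{ \"id\" : 1 }");
    }
    // ...and query it by relative path; the watcher handles cleanup.
    test("select id from dfs.`rows.json`");
  }
}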
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeWriter.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeWriter.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeWriter.java
index cb7bef2..3c0e1db 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeWriter.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestComplexTypeWriter.java
@@ -18,7 +18,7 @@
package org.apache.drill.exec.vector.complex.writer;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
import org.junit.Test;
public class TestComplexTypeWriter extends BaseTestQuery{
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
index 58793a2..b6562bb 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestExtendedTypes.java
@@ -18,47 +18,46 @@
package org.apache.drill.exec.vector.complex.writer;
import mockit.integration.junit4.JMockit;
-import org.apache.drill.BaseTestQuery;
-import org.apache.drill.common.util.TestTools;
+import static org.junit.Assert.assertEquals;
+
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.List;
+
+import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.rpc.user.QueryDataBatch;
-import org.apache.drill.test.OperatorFixture;
import org.junit.Assert;
+import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.runner.RunWith;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.List;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-import static org.junit.Assert.assertEquals;
@RunWith(JMockit.class)
public class TestExtendedTypes extends BaseTestQuery {
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestExtendedTypes.class);
+ @BeforeClass
+ public static void setupTestFiles() {
+ dirTestWatcher.copyResourceToRoot(Paths.get("vector", "complex"));
+ }
@Test
public void checkReadWriteExtended() throws Exception {
mockUtcDateTimeZone();
- final String originalFile = "${WORKING_PATH}/src/test/resources/vector/complex/extended.json".replaceAll(
- Pattern.quote("${WORKING_PATH}"),
- Matcher.quoteReplacement(TestTools.getWorkingPath()));
-
+ final String originalFile = "vector/complex/extended.json";
final String newTable = "TestExtendedTypes/newjson";
+
try {
testNoResult(String.format("ALTER SESSION SET `%s` = 'json'", ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName()));
testNoResult(String.format("ALTER SESSION SET `%s` = true", ExecConstants.JSON_EXTENDED_TYPES.getOptionName()));
// create table
- test("create table dfs_test.tmp.`%s` as select * from dfs.`%s`", newTable, originalFile);
-
+ test("create table dfs.tmp.`%s` as select * from cp.`%s`", newTable, originalFile);
// check query of table.
- test("select * from dfs_test.tmp.`%s`", newTable);
+ test("select * from dfs.tmp.`%s`", newTable);
+
// check that original file and new file match.
- final byte[] originalData = Files.readAllBytes(Paths.get(originalFile));
- final byte[] newData = Files.readAllBytes(Paths.get(BaseTestQuery.getDfsTestTmpSchemaLocation() + '/' + newTable
- + "/0_0_0.json"));
+ final byte[] originalData = Files.readAllBytes(dirTestWatcher.getRootDir().toPath().resolve(originalFile));
+ final byte[] newData = Files.readAllBytes(dirTestWatcher.getDfsTestTmpDir().toPath().resolve(Paths.get(newTable, "0_0_0.json")));
assertEquals(new String(originalData), new String(newData));
} finally {
resetSessionOption(ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName());
@@ -68,16 +67,13 @@ public class TestExtendedTypes extends BaseTestQuery {
@Test
public void testMongoExtendedTypes() throws Exception {
-
- final String originalFile = "${WORKING_PATH}/src/test/resources/vector/complex/mongo_extended.json".replaceAll(
- Pattern.quote("${WORKING_PATH}"),
- Matcher.quoteReplacement(TestTools.getWorkingPath()));
+ final String originalFile = "vector/complex/mongo_extended.json";
try {
testNoResult(String.format("ALTER SESSION SET `%s` = 'json'", ExecConstants.OUTPUT_FORMAT_VALIDATOR.getOptionName()));
testNoResult(String.format("ALTER SESSION SET `%s` = true", ExecConstants.JSON_EXTENDED_TYPES.getOptionName()));
- int actualRecordCount = testSql(String.format("select * from dfs.`%s`", originalFile));
+ int actualRecordCount = testSql(String.format("select * from cp.`%s`", originalFile));
assertEquals(
String.format(
"Received unexpected number of rows in output: expected=%d, received=%s",
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
index 9064c5c..3e3580f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/vector/complex/writer/TestJsonReader.java
@@ -17,8 +17,8 @@
*/
package org.apache.drill.exec.vector.complex.writer;
-import static org.apache.drill.TestBuilder.listOf;
-import static org.apache.drill.TestBuilder.mapOf;
+import static org.apache.drill.test.TestBuilder.listOf;
+import static org.apache.drill.test.TestBuilder.mapOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
@@ -31,12 +31,13 @@ import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
+import java.nio.file.Paths;
import java.util.List;
import java.util.zip.GZIPOutputStream;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.proto.UserBitShared;
import org.apache.drill.exec.record.RecordBatchLoader;
@@ -45,30 +46,28 @@ import org.apache.drill.exec.rpc.user.QueryDataBatch;
import org.apache.drill.exec.store.easy.json.JSONRecordReader;
import org.apache.drill.exec.vector.IntVector;
import org.apache.drill.exec.vector.RepeatedBigIntVector;
+import org.junit.BeforeClass;
import org.junit.Ignore;
-import org.junit.Rule;
import org.junit.Test;
import com.google.common.base.Charsets;
import com.google.common.io.Files;
-import org.junit.rules.TemporaryFolder;
-
public class TestJsonReader extends BaseTestQuery {
-// private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestJsonReader.class);
-
private static final boolean VERBOSE_DEBUG = false;
- @Rule
- public TemporaryFolder folder = new TemporaryFolder();
+ @BeforeClass
+ public static void setupTestFiles() {
+ dirTestWatcher.copyResourceToRoot(Paths.get("store", "json"));
+    dirTestWatcher.copyResourceToRoot(Paths.get("vector", "complex", "writer"));
+ }
@Test
public void testEmptyList() throws Exception {
- String root = FileUtils.getResourceAsFile("/store/json/emptyLists").toURI().toString();
- String query = String.format("select count(a[0]) as ct from dfs_test.`%s`", root, root);
+ final String root = "store/json/emptyLists";
testBuilder()
- .sqlQuery(query)
+      .sqlQuery("select count(a[0]) as ct from dfs.`%s`", root)
.ordered()
.baselineColumns("ct")
.baselineValues(6l)
@@ -78,7 +77,7 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void schemaChange() throws Exception {
- test("select b from dfs.`${WORKING_PATH}/src/test/resources/vector/complex/writer/schemaChange/`");
+ test("select b from dfs.`vector/complex/writer/schemaChange/`");
}
@Test
@@ -116,16 +115,16 @@ public class TestJsonReader extends BaseTestQuery {
public void testSplitAndTransferFailure() throws Exception {
final String testVal = "a string";
testBuilder()
- .sqlQuery("select flatten(config) as flat from cp.`/store/json/null_list.json`")
+ .sqlQuery("select flatten(config) as flat from cp.`store/json/null_list.json`")
.ordered()
.baselineColumns("flat")
.baselineValues(listOf())
.baselineValues(listOf(testVal))
.go();
- test("select flatten(config) as flat from cp.`/store/json/null_list_v2.json`");
+ test("select flatten(config) as flat from cp.`store/json/null_list_v2.json`");
testBuilder()
- .sqlQuery("select flatten(config) as flat from cp.`/store/json/null_list_v2.json`")
+ .sqlQuery("select flatten(config) as flat from cp.`store/json/null_list_v2.json`")
.ordered()
.baselineColumns("flat")
.baselineValues(mapOf("repeated_varchar", listOf()))
@@ -133,7 +132,7 @@ public class TestJsonReader extends BaseTestQuery {
.go();
testBuilder()
- .sqlQuery("select flatten(config) as flat from cp.`/store/json/null_list_v3.json`")
+ .sqlQuery("select flatten(config) as flat from cp.`store/json/null_list_v3.json`")
.ordered()
.baselineColumns("flat")
.baselineValues(mapOf("repeated_map", listOf(mapOf("repeated_varchar", listOf()))))
@@ -144,10 +143,10 @@ public class TestJsonReader extends BaseTestQuery {
@Test
@Ignore("DRILL-1824")
public void schemaChangeValidate() throws Exception {
- testBuilder() //
- .sqlQuery("select b from dfs.`${WORKING_PATH}/src/test/resources/vector/complex/writer/schemaChange/`") //
- .unOrdered() //
- .jsonBaselineFile("/vector/complex/writer/expected.json") //
+ testBuilder()
+ .sqlQuery("select b from dfs.`vector/complex/writer/schemaChange/`")
+ .unOrdered()
+ .jsonBaselineFile("/vector/complex/writer/expected.json")
.build()
.run();
}
@@ -157,7 +156,7 @@ public class TestJsonReader extends BaseTestQuery {
System.out.println("===================");
System.out.println("source data in json");
System.out.println("===================");
- System.out.println(Files.toString(FileUtils.getResourceAsFile(filename), Charsets.UTF_8));
+ System.out.println(Files.toString(DrillFileUtils.getResourceAsFile(filename), Charsets.UTF_8));
}
int i = 0;
@@ -181,14 +180,14 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testReadCompressed() throws Exception {
String filepath = "compressed_json.json";
- File f = folder.newFile(filepath);
+ File f = new File(dirTestWatcher.getRootDir(), filepath);
PrintWriter out = new PrintWriter(f);
out.println("{\"a\" :5}");
out.close();
gzipIt(f);
testBuilder()
- .sqlQuery("select * from dfs.`" + f.getPath() + ".gz" + "`")
+ .sqlQuery("select * from dfs.`%s.gz`", filepath)
.unOrdered()
.baselineColumns("a")
.baselineValues(5l)
@@ -196,7 +195,7 @@ public class TestJsonReader extends BaseTestQuery {
// test reading the uncompressed version as well
testBuilder()
- .sqlQuery("select * from dfs.`" + f.getPath() + "`")
+ .sqlQuery("select * from dfs.`%s`", filepath)
.unOrdered()
.baselineColumns("a")
.baselineValues(5l)
@@ -224,7 +223,7 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testDrill_1419() throws Exception {
- String[] queries = {"select t.trans_id, t.trans_info.prod_id[0],t.trans_info.prod_id[1] from cp.`/store/json/clicks.json` t limit 5"};
+ String[] queries = {"select t.trans_id, t.trans_info.prod_id[0],t.trans_info.prod_id[1] from cp.`store/json/clicks.json` t limit 5"};
long[] rowCounts = {5};
String filename = "/store/json/clicks.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
@@ -232,26 +231,26 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testRepeatedCount() throws Exception {
- test("select repeated_count(str_list) from cp.`/store/json/json_basic_repeated_varchar.json`");
- test("select repeated_count(INT_col) from cp.`/parquet/alltypes_repeated.json`");
- test("select repeated_count(FLOAT4_col) from cp.`/parquet/alltypes_repeated.json`");
- test("select repeated_count(VARCHAR_col) from cp.`/parquet/alltypes_repeated.json`");
- test("select repeated_count(BIT_col) from cp.`/parquet/alltypes_repeated.json`");
+ test("select repeated_count(str_list) from cp.`store/json/json_basic_repeated_varchar.json`");
+ test("select repeated_count(INT_col) from cp.`parquet/alltypes_repeated.json`");
+ test("select repeated_count(FLOAT4_col) from cp.`parquet/alltypes_repeated.json`");
+ test("select repeated_count(VARCHAR_col) from cp.`parquet/alltypes_repeated.json`");
+ test("select repeated_count(BIT_col) from cp.`parquet/alltypes_repeated.json`");
}
@Test
public void testRepeatedContains() throws Exception {
- test("select repeated_contains(str_list, 'asdf') from cp.`/store/json/json_basic_repeated_varchar.json`");
- test("select repeated_contains(INT_col, -2147483648) from cp.`/parquet/alltypes_repeated.json`");
- test("select repeated_contains(FLOAT4_col, -1000000000000.0) from cp.`/parquet/alltypes_repeated.json`");
- test("select repeated_contains(VARCHAR_col, 'qwerty' ) from cp.`/parquet/alltypes_repeated.json`");
- test("select repeated_contains(BIT_col, true) from cp.`/parquet/alltypes_repeated.json`");
- test("select repeated_contains(BIT_col, false) from cp.`/parquet/alltypes_repeated.json`");
+ test("select repeated_contains(str_list, 'asdf') from cp.`store/json/json_basic_repeated_varchar.json`");
+ test("select repeated_contains(INT_col, -2147483648) from cp.`parquet/alltypes_repeated.json`");
+ test("select repeated_contains(FLOAT4_col, -1000000000000.0) from cp.`parquet/alltypes_repeated.json`");
+ test("select repeated_contains(VARCHAR_col, 'qwerty' ) from cp.`parquet/alltypes_repeated.json`");
+ test("select repeated_contains(BIT_col, true) from cp.`parquet/alltypes_repeated.json`");
+ test("select repeated_contains(BIT_col, false) from cp.`parquet/alltypes_repeated.json`");
}
@Test
public void testSingleColumnRead_vector_fill_bug() throws Exception {
- String[] queries = {"select * from cp.`/store/json/single_column_long_file.json`"};
+ String[] queries = {"select * from cp.`store/json/single_column_long_file.json`"};
long[] rowCounts = {13512};
String filename = "/store/json/single_column_long_file.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
@@ -259,7 +258,7 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testNonExistentColumnReadAlone() throws Exception {
- String[] queries = {"select non_existent_column from cp.`/store/json/single_column_long_file.json`"};
+ String[] queries = {"select non_existent_column from cp.`store/json/single_column_long_file.json`"};
long[] rowCounts = {13512};
String filename = "/store/json/single_column_long_file.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
@@ -268,7 +267,7 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testAllTextMode() throws Exception {
test("alter system set `store.json.all_text_mode` = true");
- String[] queries = {"select * from cp.`/store/json/schema_change_int_to_string.json`"};
+ String[] queries = {"select * from cp.`store/json/schema_change_int_to_string.json`"};
long[] rowCounts = {3};
String filename = "/store/json/schema_change_int_to_string.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
@@ -277,7 +276,7 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void readComplexWithStar() throws Exception {
- List<QueryDataBatch> results = testSqlWithResults("select * from cp.`/store/json/test_complex_read_with_star.json`");
+ List<QueryDataBatch> results = testSqlWithResults("select * from cp.`store/json/test_complex_read_with_star.json`");
assertEquals(1, results.size());
RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
@@ -294,7 +293,7 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testNullWhereListExpected() throws Exception {
test("alter system set `store.json.all_text_mode` = true");
- String[] queries = {"select * from cp.`/store/json/null_where_list_expected.json`"};
+ String[] queries = {"select * from cp.`store/json/null_where_list_expected.json`"};
long[] rowCounts = {3};
String filename = "/store/json/null_where_list_expected.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
@@ -304,7 +303,7 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testNullWhereMapExpected() throws Exception {
test("alter system set `store.json.all_text_mode` = true");
- String[] queries = {"select * from cp.`/store/json/null_where_map_expected.json`"};
+ String[] queries = {"select * from cp.`store/json/null_where_map_expected.json`"};
long[] rowCounts = {3};
String filename = "/store/json/null_where_map_expected.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
@@ -324,7 +323,7 @@ public class TestJsonReader extends BaseTestQuery {
// ensure that the project is filtering out the correct data in the scan alone
@Test
public void testProjectPushdown() throws Exception {
- String[] queries = {Files.toString(FileUtils.getResourceAsFile("/store/json/project_pushdown_json_physical_plan.json"), Charsets.UTF_8)};
+ String[] queries = {Files.toString(DrillFileUtils.getResourceAsFile("/store/json/project_pushdown_json_physical_plan.json"), Charsets.UTF_8)};
long[] rowCounts = {3};
String filename = "/store/json/schema_change_int_to_string.json";
test("alter system set `store.json.all_text_mode` = false");
@@ -349,13 +348,8 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testJsonDirectoryWithEmptyFile() throws Exception {
- String root = FileUtils.getResourceAsFile("/store/json/jsonDirectoryWithEmpyFile").toURI().toString();
-
- String queryRightEmpty = String.format(
- "select * from dfs_test.`%s`", root);
-
testBuilder()
- .sqlQuery(queryRightEmpty)
+ .sqlQuery("select * from dfs.`store/json/jsonDirectoryWithEmpyFile`")
.unOrdered()
.baselineColumns("a")
.baselineValues(1l)
@@ -400,9 +394,8 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testSelectStarWithUnionType() throws Exception {
try {
- String query = "select * from cp.`jsoninput/union/a.json`";
testBuilder()
- .sqlQuery(query)
+ .sqlQuery("select * from cp.`jsoninput/union/a.json`")
.ordered()
.optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
.baselineColumns("field1", "field2")
@@ -452,12 +445,11 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testSelectFromListWithCase() throws Exception {
- String query = "select a, typeOf(a) `type` from " +
- "(select case when is_list(field2) then field2[4][1].inner7 end a " +
- "from cp.`jsoninput/union/a.json`) where a is not null";
try {
testBuilder()
- .sqlQuery(query)
+ .sqlQuery("select a, typeOf(a) `type` from " +
+ "(select case when is_list(field2) then field2[4][1].inner7 end a " +
+ "from cp.`jsoninput/union/a.json`) where a is not null")
.ordered()
.optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
.baselineColumns("a", "type")
@@ -470,12 +462,11 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testTypeCase() throws Exception {
- String query = "select case when is_bigint(field1) " +
- "then field1 when is_list(field1) then field1[0] " +
- "when is_map(field1) then t.field1.inner1 end f1 from cp.`jsoninput/union/a.json` t";
try {
testBuilder()
- .sqlQuery(query)
+ .sqlQuery("select case when is_bigint(field1) " +
+ "then field1 when is_list(field1) then field1[0] " +
+ "when is_map(field1) then t.field1.inner1 end f1 from cp.`jsoninput/union/a.json` t")
.ordered()
.optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
.baselineColumns("f1")
@@ -491,13 +482,12 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testSumWithTypeCase() throws Exception {
- String query = "select sum(cast(f1 as bigint)) sum_f1 from " +
- "(select case when is_bigint(field1) then field1 " +
- "when is_list(field1) then field1[0] when is_map(field1) then t.field1.inner1 end f1 " +
- "from cp.`jsoninput/union/a.json` t)";
try {
testBuilder()
- .sqlQuery(query)
+ .sqlQuery("select sum(cast(f1 as bigint)) sum_f1 from " +
+ "(select case when is_bigint(field1) then field1 " +
+ "when is_list(field1) then field1[0] when is_map(field1) then t.field1.inner1 end f1 " +
+ "from cp.`jsoninput/union/a.json` t)")
.ordered()
.optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
.baselineColumns("sum_f1")
@@ -510,10 +500,9 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testUnionExpressionMaterialization() throws Exception {
- String query = "select a + b c from cp.`jsoninput/union/b.json`";
try {
testBuilder()
- .sqlQuery(query)
+ .sqlQuery("select a + b c from cp.`jsoninput/union/b.json`")
.ordered()
.optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
.baselineColumns("c")
@@ -528,9 +517,7 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testSumMultipleBatches() throws Exception {
- String dfs_temp = getDfsTestTmpSchemaLocation();
- File table_dir = new File(dfs_temp, "multi_batch");
- table_dir.mkdir();
+ File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("multi_batch"));
BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "a.json")));
for (int i = 0; i < 10000; i++) {
os.write("{ type : \"map\", data : { a : 1 } }\n".getBytes());
@@ -538,10 +525,10 @@ public class TestJsonReader extends BaseTestQuery {
}
os.flush();
os.close();
- String query = "select sum(cast(case when `type` = 'map' then t.data.a else data end as bigint)) `sum` from dfs_test.tmp.multi_batch t";
+
try {
testBuilder()
- .sqlQuery(query)
+ .sqlQuery("select sum(cast(case when `type` = 'map' then t.data.a else data end as bigint)) `sum` from dfs.tmp.multi_batch t")
.ordered()
.optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
.baselineColumns("sum")
@@ -554,9 +541,7 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testSumFilesWithDifferentSchema() throws Exception {
- String dfs_temp = getDfsTestTmpSchemaLocation();
- File table_dir = new File(dfs_temp, "multi_file");
- table_dir.mkdir();
+ File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("multi_file"));
BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "a.json")));
for (int i = 0; i < 10000; i++) {
os.write("{ type : \"map\", data : { a : 1 } }\n".getBytes());
@@ -569,10 +554,10 @@ public class TestJsonReader extends BaseTestQuery {
}
os.flush();
os.close();
- String query = "select sum(cast(case when `type` = 'map' then t.data.a else data end as bigint)) `sum` from dfs_test.tmp.multi_file t";
+
try {
testBuilder()
- .sqlQuery(query)
+ .sqlQuery("select sum(cast(case when `type` = 'map' then t.data.a else data end as bigint)) `sum` from dfs.tmp.multi_file t")
.ordered()
.optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
.baselineColumns("sum")
@@ -585,8 +570,7 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void drill_4032() throws Exception {
- String dfs_temp = getDfsTestTmpSchemaLocation();
- File table_dir = new File(dfs_temp, "drill_4032");
+ File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4032"));
table_dir.mkdir();
BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "a.json")));
os.write("{\"col1\": \"val1\",\"col2\": null}".getBytes());
@@ -598,14 +582,13 @@ public class TestJsonReader extends BaseTestQuery {
os.write("{\"col1\": \"val1\",\"col2\": null}".getBytes());
os.flush();
os.close();
- testNoResult("select t.col2.col3 from dfs_test.tmp.drill_4032 t");
+ testNoResult("select t.col2.col3 from dfs.tmp.drill_4032 t");
}
@Test
public void drill_4479() throws Exception {
try {
- String dfs_temp = getDfsTestTmpSchemaLocation();
- File table_dir = new File(dfs_temp, "drill_4479");
+ File table_dir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4479"));
table_dir.mkdir();
BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(new File(table_dir, "mostlynulls.json")));
// Create an entire batch of null values for 3 columns
@@ -617,12 +600,8 @@ public class TestJsonReader extends BaseTestQuery {
os.flush();
os.close();
- String query1 = "select c, count(*) as cnt from dfs_test.tmp.drill_4479 t group by c";
- String query2 = "select a, b, c, count(*) as cnt from dfs_test.tmp.drill_4479 t group by a, b, c";
- String query3 = "select max(a) as x, max(b) as y, max(c) as z from dfs_test.tmp.drill_4479 t";
-
testBuilder()
- .sqlQuery(query1)
+ .sqlQuery("select c, count(*) as cnt from dfs.tmp.drill_4479 t group by c")
.ordered()
.optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = true")
.baselineColumns("c", "cnt")
@@ -631,7 +610,7 @@ public class TestJsonReader extends BaseTestQuery {
.go();
testBuilder()
- .sqlQuery(query2)
+ .sqlQuery("select a, b, c, count(*) as cnt from dfs.tmp.drill_4479 t group by a, b, c")
.ordered()
.optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = true")
.baselineColumns("a", "b", "c", "cnt")
@@ -640,7 +619,7 @@ public class TestJsonReader extends BaseTestQuery {
.go();
testBuilder()
- .sqlQuery(query3)
+ .sqlQuery("select max(a) as x, max(b) as y, max(c) as z from dfs.tmp.drill_4479 t")
.ordered()
.optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = true")
.baselineColumns("x", "y", "z")
@@ -654,18 +633,12 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testFlattenEmptyArrayWithAllTextMode() throws Exception {
- File path = new File(BaseTestQuery.getTempDir("json/input"));
- path.mkdirs();
- path.deleteOnExit();
- String pathString = path.toPath().toString();
-
- try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(path, "empty_array_all_text_mode.json")))) {
+ try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), "empty_array_all_text_mode.json")))) {
writer.write("{ \"a\": { \"b\": { \"c\": [] }, \"c\": [] } }");
}
try {
- String query = String.format("select flatten(t.a.b.c) as c from dfs_test.`%s/empty_array_all_text_mode.json` t",
- pathString);
+ String query = "select flatten(t.a.b.c) as c from dfs.`empty_array_all_text_mode.json` t";
testBuilder()
.sqlQuery(query)
@@ -688,18 +661,12 @@ public class TestJsonReader extends BaseTestQuery {
@Test
public void testFlattenEmptyArrayWithUnionType() throws Exception {
- File path = new File(BaseTestQuery.getTempDir("json/input"));
- path.mkdirs();
- path.deleteOnExit();
- String pathString = path.toPath().toString();
-
- try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(path, "empty_array.json")))) {
+ try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), "empty_array.json")))) {
writer.write("{ \"a\": { \"b\": { \"c\": [] }, \"c\": [] } }");
}
try {
- String query = String.format("select flatten(t.a.b.c) as c from dfs_test.`%s/empty_array.json` t",
- pathString);
+ String query = "select flatten(t.a.b.c) as c from dfs.`empty_array.json` t";
testBuilder()
.sqlQuery(query)
@@ -724,49 +691,33 @@ public class TestJsonReader extends BaseTestQuery {
@Test // DRILL-5521
public void testKvgenWithUnionAll() throws Exception {
- File directory = new File(BaseTestQuery.getTempDir("json/input"));
- try {
- directory.mkdirs();
- String fileName = "map.json";
- try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(directory, fileName)))) {
- writer.write("{\"rk\": \"a\", \"m\": {\"a\":\"1\"}}");
- }
-
- String query = String.format("select kvgen(m) as res from (select m from dfs_test.`%s/%s` union all " +
- "select convert_from('{\"a\" : null}' ,'json') as m from (values(1)))", directory.toPath().toString(), fileName);
- assertEquals("Row count should match", 2, testSql(query));
-
- } finally {
- org.apache.commons.io.FileUtils.deleteQuietly(directory);
+ String fileName = "map.json";
+ try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), fileName)))) {
+ writer.write("{\"rk\": \"a\", \"m\": {\"a\":\"1\"}}");
}
+
+ String query = String.format("select kvgen(m) as res from (select m from dfs.`%s` union all " +
+ "select convert_from('{\"a\" : null}' ,'json') as m from (values(1)))", fileName);
+ assertEquals("Row count should match", 2, testSql(query));
}
@Test // DRILL-4264
public void testFieldWithDots() throws Exception {
- File directory = new File(BaseTestQuery.getTempDir("json/input"));
- try {
- directory.mkdirs();
- String fileName = "table.json";
- try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(directory, fileName)))) {
- writer.write("{\"rk.q\": \"a\", \"m\": {\"a.b\":\"1\", \"a\":{\"b\":\"2\"}, \"c\":\"3\"}}");
- }
-
- String query = String.format("select t.m.`a.b` as a,\n" +
- "t.m.a.b as b,\n" +
- "t.m['a.b'] as c,\n" +
- "t.rk.q as d,\n" +
- "t.`rk.q` as e\n" +
- "from dfs_test.`%s/%s` t",
- directory.toPath().toString(), fileName);
- testBuilder()
- .sqlQuery(query)
- .unOrdered()
- .baselineColumns("a", "b", "c", "d", "e")
- .baselineValues("1", "2", "1", null, "a")
- .go();
-
- } finally {
- org.apache.commons.io.FileUtils.deleteQuietly(directory);
+ String fileName = "table.json";
+ try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), fileName)))) {
+ writer.write("{\"rk.q\": \"a\", \"m\": {\"a.b\":\"1\", \"a\":{\"b\":\"2\"}, \"c\":\"3\"}}");
}
+
+ testBuilder()
+ .sqlQuery("select t.m.`a.b` as a,\n" +
+ "t.m.a.b as b,\n" +
+ "t.m['a.b'] as c,\n" +
+ "t.rk.q as d,\n" +
+ "t.`rk.q` as e\n" +
+ "from dfs.`%s` t", fileName)
+ .unOrdered()
+ .baselineColumns("a", "b", "c", "d", "e")
+ .baselineValues("1", "2", "1", null, "a")
+ .go();
}
}
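
The `dfs.tmp` tests above all reduce to the same pattern: `makeTestTmpSubDir` creates a table directory under the watcher's dfs-test-tmp area, data is written into it, and the query addresses it as `dfs.tmp.<name>`. A compact sketch (class, table, and file names hypothetical):

import java.io.File;
import java.io.PrintWriter;
import java.nio.file.Paths;
import org.apache.drill.test.BaseTestQuery;
import org.junit.Test;

public class ExampleTmpTableTest extends BaseTestQuery {
  @Test
  public void queriesTmpTable() throws Exception {
    // Creates the table directory under the area mapped to dfs.tmp.
    File tableDir = dirTestWatcher.makeTestTmpSubDir(Paths.get("my_table"));
    try (PrintWriter out = new PrintWriter(new File(tableDir, "a.json"))) {
      out.println("{ \"a\" : 1 }");
    }
    test("select a from dfs.tmp.my_table");
  }
}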
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/work/batch/TestSpoolingBuffer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/work/batch/TestSpoolingBuffer.java b/exec/java-exec/src/test/java/org/apache/drill/exec/work/batch/TestSpoolingBuffer.java
index 389bb80..0f18fba 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/work/batch/TestSpoolingBuffer.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/work/batch/TestSpoolingBuffer.java
@@ -21,9 +21,9 @@ import static org.junit.Assert.assertEquals;
import java.util.List;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.util.FileUtils;
+import org.apache.drill.common.util.DrillFileUtils;
import org.apache.drill.exec.client.DrillClient;
import org.apache.drill.exec.rpc.user.QueryDataBatch;
import org.apache.drill.exec.server.Drillbit;
@@ -47,7 +47,7 @@ public class TestSpoolingBuffer extends BaseTestQuery {
bit1.run();
client.connect();
List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
- Files.toString(FileUtils.getResourceAsFile("/work/batch/multiple_exchange.json"),
+ Files.toString(DrillFileUtils.getResourceAsFile("/work/batch/multiple_exchange.json"),
Charsets.UTF_8));
int count = 0;
for(QueryDataBatch b : results) {
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/work/fragment/TestFragmentExecutorCancel.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/work/fragment/TestFragmentExecutorCancel.java b/exec/java-exec/src/test/java/org/apache/drill/exec/work/fragment/TestFragmentExecutorCancel.java
index cdb703f..d2181b4 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/work/fragment/TestFragmentExecutorCancel.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/work/fragment/TestFragmentExecutorCancel.java
@@ -19,7 +19,7 @@ package org.apache.drill.exec.work.fragment;
import static org.junit.Assert.fail;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.exec.exception.OutOfMemoryException;
import org.apache.drill.exec.proto.CoordinationProtos;
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/work/metadata/TestMetadataProvider.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/work/metadata/TestMetadataProvider.java b/exec/java-exec/src/test/java/org/apache/drill/exec/work/metadata/TestMetadataProvider.java
index 4eabfe3..c30cb09 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/work/metadata/TestMetadataProvider.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/work/metadata/TestMetadataProvider.java
@@ -27,7 +27,7 @@ import static org.junit.Assert.fail;
import java.util.Arrays;
import java.util.List;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.categories.OptionsTest;
import org.apache.drill.exec.proto.UserProtos.CatalogMetadata;
import org.apache.drill.exec.proto.UserProtos.ColumnMetadata;
@@ -103,16 +103,13 @@ public class TestMetadataProvider extends BaseTestQuery {
assertEquals(RequestStatus.OK, resp.getStatus());
List<SchemaMetadata> schemas = resp.getSchemasList();
- assertEquals(9, schemas.size());
+ assertEquals(6, schemas.size());
verifySchema("INFORMATION_SCHEMA", schemas);
verifySchema("cp.default", schemas);
verifySchema("dfs.default", schemas);
verifySchema("dfs.root", schemas);
verifySchema("dfs.tmp", schemas);
- verifySchema("dfs_test.default", schemas);
- verifySchema("dfs_test.home", schemas);
- verifySchema("dfs_test.tmp", schemas);
verifySchema("sys", schemas);
}
@@ -131,21 +128,17 @@ public class TestMetadataProvider extends BaseTestQuery {
@Test
public void schemasWithCatalogNameFilterAndSchemaNameFilter() throws Exception {
-
- // test("SELECT * FROM INFORMATION_SCHEMA.SCHEMATA " +
- // "WHERE CATALOG_NAME LIKE '%RI%' AND SCHEMA_NAME LIKE '%y%'"); // SQL equivalent
-
GetSchemasResp resp = client.getSchemas(
LikeFilter.newBuilder().setPattern("%RI%").build(),
- LikeFilter.newBuilder().setPattern("%dfs_test%").build()).get();
+ LikeFilter.newBuilder().setPattern("%dfs%").build()).get();
assertEquals(RequestStatus.OK, resp.getStatus());
List<SchemaMetadata> schemas = resp.getSchemasList();
assertEquals(3, schemas.size());
- verifySchema("dfs_test.default", schemas);
- verifySchema("dfs_test.home", schemas);
- verifySchema("dfs_test.tmp", schemas);
+ verifySchema("dfs.default", schemas);
+ verifySchema("dfs.root", schemas);
+ verifySchema("dfs.tmp", schemas);
}
@Test
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/work/metadata/TestServerMetaProvider.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/work/metadata/TestServerMetaProvider.java b/exec/java-exec/src/test/java/org/apache/drill/exec/work/metadata/TestServerMetaProvider.java
index c1fd38b..7099a37 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/work/metadata/TestServerMetaProvider.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/work/metadata/TestServerMetaProvider.java
@@ -21,7 +21,7 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import org.apache.calcite.avatica.util.Quoting;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.exec.proto.UserProtos.GetServerMetaResp;
import org.apache.drill.exec.proto.UserProtos.RequestStatus;
import org.apache.drill.exec.proto.UserProtos.ServerMeta;
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/exec/work/prepare/PreparedStatementTestBase.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/work/prepare/PreparedStatementTestBase.java b/exec/java-exec/src/test/java/org/apache/drill/exec/work/prepare/PreparedStatementTestBase.java
index 814414c..6802bc9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/work/prepare/PreparedStatementTestBase.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/work/prepare/PreparedStatementTestBase.java
@@ -16,7 +16,7 @@
*/
package org.apache.drill.exec.work.prepare;
-import org.apache.drill.BaseTestQuery;
+import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.exec.proto.UserBitShared;
import org.apache.drill.exec.proto.UserProtos;
import org.apache.drill.exec.store.ischema.InfoSchemaConstants;
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/test/BaseDirTestWatcher.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/BaseDirTestWatcher.java b/exec/java-exec/src/test/java/org/apache/drill/test/BaseDirTestWatcher.java
new file mode 100644
index 0000000..fe7e6a6
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/BaseDirTestWatcher.java
@@ -0,0 +1,278 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.drill.test;
+
+import com.google.common.base.Charsets;
+import org.apache.commons.io.FileUtils;
+import org.junit.runner.Description;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+
+/**
+ * <p>
+ * This is a {@link DirTestWatcher} which creates all the temporary directories required by a Drillbit and the various <b>dfs.*</b> storage workspaces. It also
+ * provides convenience methods that do the following:
+ * </p>
+ *
+ * <ol>
+ * <li>Copy project files to temp directories. This is useful for copying the sample data into a temp directory.</li>
+ * <li>Copy resource files to temp.</li>
+ *   <li>Update parquet metadata files.</li>
+ * </ol>
+ *
+ * <p>
+ * The {@link BaseDirTestWatcher} is used in {@link BaseTestQuery} and an example of how it is used in conjunction with the {@link ClusterFixture} can be found in
+ * {@link ExampleTest}.
+ * </p>
+ */
+public class BaseDirTestWatcher extends DirTestWatcher {
+ /**
+   * An enum used to represent the directories mapped to the <b>dfs.root</b> and <b>dfs.tmp</b> workspaces respectively.
+ */
+ public enum DirType {
+ ROOT, // Corresponds to the directory that should be mapped to dfs.root
+ TEST_TMP // Corresponds to the directory that should be mapped to dfs.tmp
+ }
+
+ private File tmpDir;
+ private File storeDir;
+ private File dfsTestTmpParentDir;
+ private File dfsTestTmpDir;
+ private File rootDir;
+
+ /**
+   * Creates a {@link BaseDirTestWatcher} which does not delete its temp directories at the end of tests.
+ */
+ public BaseDirTestWatcher() {
+ super();
+ }
+
+ /**
+ * Creates a {@link BaseDirTestWatcher}.
+   * @param deleteDirAtEnd If true, temp directories are deleted at the end of tests. If false, temp directories are not deleted at the end of tests.
+ */
+ public BaseDirTestWatcher(boolean deleteDirAtEnd) {
+ super(deleteDirAtEnd);
+ }
+
+ @Override
+ protected void starting(Description description) {
+ super.starting(description);
+
+ rootDir = makeSubDir(Paths.get("root"));
+ tmpDir = makeSubDir(Paths.get("tmp"));
+ storeDir = makeSubDir(Paths.get("store"));
+ dfsTestTmpParentDir = makeSubDir(Paths.get("dfsTestTmp"));
+
+ newDfsTestTmpDir();
+ }
+
+ /**
+ * Clear contents of cluster directories
+ */
+ public void clear() {
+ try {
+ FileUtils.cleanDirectory(rootDir);
+ FileUtils.cleanDirectory(tmpDir);
+ FileUtils.cleanDirectory(storeDir);
+ FileUtils.cleanDirectory(dfsTestTmpDir);
+ } catch (IOException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ /**
+ * Gets the temp directory that should be used as a Drillbit's tmp directory.
+ * @return The temp directory that should be used as a Drillbit's tmp directory.
+ */
+ public File getTmpDir() {
+ return tmpDir;
+ }
+
+ /**
+ * Gets the temp directory that should be used by the {@link org.apache.drill.exec.store.sys.store.LocalPersistentStore}.
+ * @return The temp directory that should be used by the {@link org.apache.drill.exec.store.sys.store.LocalPersistentStore}.
+ */
+ public File getStoreDir() {
+ return storeDir;
+ }
+
+ /**
+ * Gets the temp directory that should be used by the <b>dfs.tmp</b> workspace.
+ * @return The temp directory that should be used by the <b>dfs.tmp</b> workspace.
+ */
+ public File getDfsTestTmpDir() {
+ return dfsTestTmpDir;
+ }
+
+ /**
+ * Gets the temp directory that should be used to hold the contents of the <b>dfs.root</b> workspace.
+ * @return The temp directory that should be used to hold the contents of the <b>dfs.root</b> workspace.
+ */
+ public File getRootDir() {
+ return rootDir;
+ }
+
+ /**
+   * This method creates a new directory which can be mapped to <b>dfs.tmp</b>.
+ */
+ public void newDfsTestTmpDir() {
+ dfsTestTmpDir = DirTestWatcher.createTempDir(dfsTestTmpParentDir);
+ }
+
+ /**
+ * A helper method which returns the correct directory corresponding to the given {@link DirType}.
+ * @param type The directory to return.
+ * @return The directory corresponding to the given {@link DirType}.
+ */
+ private File getDir(DirType type) {
+ switch (type) {
+ case ROOT:
+ return rootDir;
+ case TEST_TMP:
+ return dfsTestTmpDir;
+ default:
+ throw new IllegalArgumentException(String.format("Unsupported type %s", type));
+ }
+ }
+
+ /**
+ * Creates a directory in the temp root directory (corresponding to <b>dfs.root</b>) at the given relative path.
+ * @param relPath The relative path in the temp root directory at which to create a directory.
+ * @return The {@link java.io.File} corresponding to the sub directory that was created.
+ */
+ public File makeRootSubDir(Path relPath) {
+ return makeSubDir(relPath, DirType.ROOT);
+ }
+
+ /**
+   * Creates a directory in the temp test tmp directory (corresponding to <b>dfs.tmp</b>) at the given relative path.
+   * @param relPath The relative path in the temp test tmp directory at which to create a directory.
+ * @return The {@link java.io.File} corresponding to the sub directory that was created.
+ */
+ public File makeTestTmpSubDir(Path relPath) {
+ return makeSubDir(relPath, DirType.TEST_TMP);
+ }
+
+ private File makeSubDir(Path relPath, DirType type) {
+ File subDir = getDir(type)
+ .toPath()
+ .resolve(relPath)
+ .toFile();
+ subDir.mkdirs();
+ return subDir;
+ }
+
+ /**
+ * This copies a file or directory from <b>src/test/resources</b> into the temp root directory (corresponding to <b>dfs.root</b>). The relative path of the file or
+ * directory in <b>src/test/resources</b> is preserved in the temp root directory.
+ * @param relPath The relative path of the file or directory in <b>src/test/resources</b> to copy into the root temp folder.
+ * @return The {@link java.io.File} corresponding to the copied file or directory in the temp root directory.
+ */
+ public File copyResourceToRoot(Path relPath) {
+ return copyTo(relPath, relPath, TestTools.FileSource.RESOURCE, DirType.ROOT);
+ }
+
+ /**
+   * This copies a file or directory from the maven project into the temp root directory (corresponding to <b>dfs.root</b>). The relative path of the file or directory
+ * in the maven module is preserved in the temp root directory.
+ * @param relPath The relative path of the file or directory in the maven module to copy into the root temp folder.
+ * @return The {@link java.io.File} corresponding to the copied file or directory in the temp root directory.
+ */
+ public File copyFileToRoot(Path relPath) {
+ return copyTo(relPath, relPath, TestTools.FileSource.PROJECT, DirType.ROOT);
+ }
+
+ /**
+ * This copies a file or directory from <b>src/test/resources</b> into the temp root directory (corresponding to <b>dfs.root</b>). The file or directory is copied
+ * to the provided relative destPath in the temp root directory.
+ * @param relPath The source relative path of a file or directory from <b>src/test/resources</b> that will be copied.
+ * @param destPath The destination relative path of the file or directory in the temp root directory.
+ * @return The {@link java.io.File} corresponding to the final copied file or directory in the temp root directory.
+ */
+ public File copyResourceToRoot(Path relPath, Path destPath) {
+ return copyTo(relPath, destPath, TestTools.FileSource.RESOURCE, DirType.ROOT);
+ }
+
+ /**
+ * This copies a file or directory from <b>src/test/resources</b> into the temp directory corresponding to <b>dfs.tmp</b>. The file or directory is copied
+ * to the provided relative destPath in that temp directory.
+ * @param relPath The source relative path of a file or directory from <b>src/test/resources</b> that will be copied.
+ * @param destPath The destination relative path of the file or directory in the temp <b>dfs.tmp</b> directory.
+ * @return The {@link java.io.File} corresponding to the final copied file or directory in the temp <b>dfs.tmp</b> directory.
+ */
+ public File copyResourceToTestTmp(Path relPath, Path destPath) {
+ return copyTo(relPath, destPath, TestTools.FileSource.RESOURCE, DirType.TEST_TMP);
+ }
+
+ private File copyTo(Path relPath, Path destPath, TestTools.FileSource fileSource, DirType dirType) {
+ File file = TestTools.getFile(relPath, fileSource);
+
+ if (file.isDirectory()) {
+ File subDir = makeSubDir(destPath, dirType);
+ TestTools.copyDirToDest(relPath, subDir, fileSource);
+ return subDir;
+ } else {
+ File baseDir = getDir(dirType);
+
+ baseDir.toPath()
+ .resolve(destPath)
+ .getParent()
+ .toFile()
+ .mkdirs();
+
+ File destFile = baseDir.toPath()
+ .resolve(destPath)
+ .toFile();
+
+ try {
+ destFile.createNewFile();
+ FileUtils.copyFile(file, destFile);
+ } catch (IOException e) {
+ throw new RuntimeException("This should not happen", e);
+ }
+
+ return destFile;
+ }
+ }
+
+ /**
+ * This is a convenience method that replaces placeholders in test parquet metadata files.
+ * @param metaDataFile The parquet metadata file to do string replacement on.
+ * @param replacePath The path to replace <b>REPLACED_IN_TEST</b> with in the parquet metadata file.
+ * @param customStringReplacement If provided, occurrences of <b>CUSTOM_STRING_REPLACEMENT</b> in the parquet metadata file are replaced with this string.
+ */
+ public void replaceMetaDataContents(File metaDataFile, File replacePath, String customStringReplacement) {
+ try {
+ String metadataFileContents = FileUtils.readFileToString(metaDataFile, Charsets.UTF_8);
+
+ if (customStringReplacement != null) {
+ metadataFileContents = metadataFileContents.replace("CUSTOM_STRING_REPLACEMENT", customStringReplacement);
+ }
+
+ metadataFileContents = metadataFileContents.replace("REPLACED_IN_TEST", replacePath.getCanonicalPath());
+ FileUtils.write(metaDataFile, metadataFileContents, Charsets.UTF_8);
+ } catch (IOException e) {
+ throw new RuntimeException("This should not happen", e);
+ }
+ }
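+
+ // Sketch of intended use (file names are hypothetical): after staging a table whose
+ // metadata cache file contains the REPLACED_IN_TEST placeholder, point the cache at
+ // the staged directory:
+ //
+ //   File tableDir = dirTestWatcher.copyResourceToRoot(Paths.get("parquet", "cached"));
+ //   File metaFile = new File(tableDir, ".drill.parquet_metadata");
+ //   dirTestWatcher.replaceMetaDataContents(metaFile, tableDir, null);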
+}
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java b/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java
new file mode 100644
index 0000000..802ce1b
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java
@@ -0,0 +1,613 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.test;
+
+import static org.apache.drill.exec.util.StoragePluginTestUtils.DEFAULT_SCHEMA;
+import static org.apache.drill.exec.util.StoragePluginTestUtils.ROOT_SCHEMA;
+import static org.apache.drill.exec.util.StoragePluginTestUtils.TMP_SCHEMA;
+import static org.hamcrest.core.StringContains.containsString;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.fail;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URL;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.drill.test.DrillTestWrapper.TestServices;
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.config.DrillProperties;
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.scanner.ClassPathScanner;
+import org.apache.drill.common.scanner.persistence.ScanResult;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.ExecTest;
+import org.apache.drill.exec.client.DrillClient;
+import org.apache.drill.exec.exception.SchemaChangeException;
+import org.apache.drill.exec.memory.BufferAllocator;
+import org.apache.drill.exec.memory.RootAllocatorFactory;
+import org.apache.drill.exec.proto.UserBitShared;
+import org.apache.drill.exec.proto.UserBitShared.QueryId;
+import org.apache.drill.exec.proto.UserBitShared.QueryResult.QueryState;
+import org.apache.drill.exec.proto.UserBitShared.QueryType;
+import org.apache.drill.exec.proto.UserProtos.PreparedStatementHandle;
+import org.apache.drill.exec.record.RecordBatchLoader;
+import org.apache.drill.exec.rpc.ConnectionThrottle;
+import org.apache.drill.exec.rpc.user.AwaitableUserResultsListener;
+import org.apache.drill.exec.rpc.user.QueryDataBatch;
+import org.apache.drill.exec.rpc.user.UserResultsListener;
+import org.apache.drill.exec.server.Drillbit;
+import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.server.RemoteServiceSet;
+import org.apache.drill.exec.store.StoragePluginRegistry;
+import org.apache.drill.exec.util.StoragePluginTestUtils;
+import org.apache.drill.exec.util.VectorUtil;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.ClassRule;
+
+import com.google.common.base.Charsets;
+import com.google.common.base.Preconditions;
+import com.google.common.io.Resources;
+
+import org.apache.drill.exec.record.VectorWrapper;
+import org.apache.drill.exec.vector.ValueVector;
+
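+/**
+ * Base class for tests that run queries against a single embedded Drillbit cluster.
+ * A minimal (hypothetical) subclass only needs to extend this class and use the
+ * inherited helpers:
+ *
+ * <pre>{@code
+ * public class TestMyFeature extends BaseTestQuery {
+ *   @Test
+ *   public void countRows() throws Exception {
+ *     test("SELECT count(*) FROM cp.`employee.json`");
+ *   }
+ * }
+ * }</pre>
+ */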
+public class BaseTestQuery extends ExecTest {
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BaseTestQuery.class);
+
+ private static final int MAX_WIDTH_PER_NODE = 2;
+
+ @SuppressWarnings("serial")
+ private static final Properties TEST_CONFIGURATIONS = new Properties() {
+ {
+ put(ExecConstants.SYS_STORE_PROVIDER_LOCAL_ENABLE_WRITE, "false");
+ put(ExecConstants.HTTP_ENABLE, "false");
+ // Increasing retry attempts for testing
+ put(ExecConstants.UDF_RETRY_ATTEMPTS, "10");
+ put(ExecConstants.SSL_USE_HADOOP_CONF, "false");
+ }
+ };
+
+ protected static DrillClient client;
+ protected static Drillbit[] bits;
+ protected static RemoteServiceSet serviceSet;
+ protected static DrillConfig config;
+ protected static BufferAllocator allocator;
+
+ /**
+ * Number of Drillbits in test cluster. Default is 1.
+ *
+ * Tests can update the cluster size through {@link #updateTestCluster(int, DrillConfig)}
+ */
+ private static int drillbitCount = 1;
+
+ private int[] columnWidths = new int[] { 8 };
+
+ private static ScanResult classpathScan;
+
+ @ClassRule
+ public static final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();
+
+ @BeforeClass
+ public static void setupDefaultTestCluster() throws Exception {
+ config = DrillConfig.create(cloneDefaultTestConfigProperties());
+ classpathScan = ClassPathScanner.fromPrescan(config);
+ openClient();
+ // Turn on verbose errors in tests: server-side stack traces are added to the
+ // message before it is sent back to the client.
+ test("ALTER SESSION SET `exec.errors.verbose` = true");
+ }
+
+ protected static void updateTestCluster(int newDrillbitCount, DrillConfig newConfig) {
+ updateTestCluster(newDrillbitCount, newConfig, cloneDefaultTestConfigProperties());
+ }
+
+ protected static void updateTestCluster(int newDrillbitCount, DrillConfig newConfig, Properties properties) {
+ Preconditions.checkArgument(newDrillbitCount > 0, "Number of Drillbits must be at least one");
+ if (drillbitCount != newDrillbitCount || config != null) {
+ // TODO: Currently we have to shutdown the existing Drillbit cluster before starting a new one with the given
+ // Drillbit count. Revisit later to avoid stopping the cluster.
+ try {
+ closeClient();
+ drillbitCount = newDrillbitCount;
+ if (newConfig != null) {
+ // For next test class, updated DrillConfig will be replaced by default DrillConfig in BaseTestQuery as part
+ // of the @BeforeClass method of test class.
+ config = newConfig;
+ }
+ openClient(properties);
+ } catch(Exception e) {
+ throw new RuntimeException("Failure while updating the test Drillbit cluster.", e);
+ }
+ }
+ }
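+
+ // Hypothetical use from a subclass's @BeforeClass method: restart the cluster with
+ // two Drillbits and an overridden config:
+ //
+ //   Properties props = cloneDefaultTestConfigProperties();
+ //   updateTestCluster(2, DrillConfig.create(props));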
+
+ /**
+ * Useful for tests that require a DrillbitContext to get/add storage plugins, options etc.
+ *
+ * @return DrillbitContext of first Drillbit in the cluster.
+ */
+ protected static DrillbitContext getDrillbitContext() {
+ Preconditions.checkState(bits != null && bits[0] != null, "Drillbits are not setup.");
+ return bits[0].getContext();
+ }
+
+ protected static Properties cloneDefaultTestConfigProperties() {
+ final Properties props = new Properties();
+ for(String propName : TEST_CONFIGURATIONS.stringPropertyNames()) {
+ props.put(propName, TEST_CONFIGURATIONS.getProperty(propName));
+ }
+
+ props.setProperty(ExecConstants.DRILL_TMP_DIR, dirTestWatcher.getTmpDir().getAbsolutePath());
+ props.setProperty(ExecConstants.SYS_STORE_PROVIDER_LOCAL_PATH, dirTestWatcher.getStoreDir().getAbsolutePath());
+
+ return props;
+ }
+
+ private static void openClient() throws Exception {
+ openClient(null);
+ }
+
+ private static void openClient(Properties properties) throws Exception {
+ if (properties == null) {
+ properties = new Properties();
+ }
+
+ allocator = RootAllocatorFactory.newRoot(config);
+ serviceSet = RemoteServiceSet.getLocalServiceSet();
+
+ dirTestWatcher.newDfsTestTmpDir();
+
+ bits = new Drillbit[drillbitCount];
+ for(int i = 0; i < drillbitCount; i++) {
+ bits[i] = new Drillbit(config, serviceSet, classpathScan);
+ bits[i].run();
+
+ @SuppressWarnings("resource")
+ final StoragePluginRegistry pluginRegistry = bits[i].getContext().getStorage();
+ StoragePluginTestUtils.configureFormatPlugins(pluginRegistry);
+
+ StoragePluginTestUtils.updateSchemaLocation(StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry,
+ dirTestWatcher.getDfsTestTmpDir(), TMP_SCHEMA);
+ StoragePluginTestUtils.updateSchemaLocation(StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry,
+ dirTestWatcher.getRootDir(), ROOT_SCHEMA);
+ StoragePluginTestUtils.updateSchemaLocation(StoragePluginTestUtils.DFS_PLUGIN_NAME, pluginRegistry,
+ dirTestWatcher.getRootDir(), DEFAULT_SCHEMA);
+ }
+
+ if (!properties.containsKey(DrillProperties.DRILLBIT_CONNECTION)) {
+ properties.setProperty(DrillProperties.DRILLBIT_CONNECTION,
+ String.format("localhost:%s", bits[0].getUserPort()));
+ }
+
+ DrillConfig clientConfig = DrillConfig.forClient();
+ client = QueryTestUtil.createClient(clientConfig, serviceSet, MAX_WIDTH_PER_NODE, properties);
+ }
+
+ /**
+ * Close the current <i>client</i> and open a new client using the given <i>properties</i>. All tests executed
+ * after this method call use the new <i>client</i>.
+ *
+ * @param properties The connection properties to use for the new client.
+ */
+ public static void updateClient(Properties properties) throws Exception {
+ Preconditions.checkState(bits != null && bits[0] != null, "Drillbits are not setup.");
+ if (client != null) {
+ client.close();
+ client = null;
+ }
+
+ DrillConfig clientConfig = DrillConfig.forClient();
+ client = QueryTestUtil.createClient(clientConfig, serviceSet, MAX_WIDTH_PER_NODE, properties);
+ }
+
+ /**
+ * Close the current <i>client</i> and open a new client for the given user. All tests executed
+ * after this method call use the new <i>client</i>.
+ * @param user The user name to connect as.
+ */
+ public static void updateClient(String user) throws Exception {
+ updateClient(user, null);
+ }
+
+ /**
+ * Close the current <i>client</i> and open a new client for the given user and password credentials. Tests
+ * executed after this method call use the new <i>client</i>.
+ * @param user The user name to connect as.
+ * @param password The password for the user, or null if no password is required.
+ */
+ public static void updateClient(final String user, final String password) throws Exception {
+ final Properties props = new Properties();
+ props.setProperty(DrillProperties.USER, user);
+ if (password != null) {
+ props.setProperty(DrillProperties.PASSWORD, password);
+ }
+ updateClient(props);
+ }
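+
+ // For example (credentials invented for illustration), an authentication test might
+ // reconnect as a specific user before asserting authorization behavior:
+ //
+ //   updateClient("someUser", "somePassword");
+ //   test("SHOW SCHEMAS");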
+
+ protected static BufferAllocator getAllocator() {
+ return allocator;
+ }
+
+ public static int getUserPort() {
+ return bits[0].getUserPort();
+ }
+
+ public static TestBuilder newTest() {
+ return testBuilder();
+ }
+
+ public static class ClassicTestServices implements TestServices {
+ @Override
+ public BufferAllocator allocator() {
+ return allocator;
+ }
+
+ @Override
+ public void test(String query) throws Exception {
+ BaseTestQuery.test(query);
+ }
+
+ @Override
+ public List<QueryDataBatch> testRunAndReturn(final QueryType type, final Object query) throws Exception {
+ return BaseTestQuery.testRunAndReturn(type, query);
+ }
+ }
+
+ public static TestBuilder testBuilder() {
+ return new TestBuilder(new ClassicTestServices());
+ }
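+
+ // A minimal sketch of the builder in use (query and baseline chosen for illustration):
+ //
+ //   testBuilder()
+ //     .sqlQuery("SELECT employee_id FROM cp.`employee.json` ORDER BY employee_id LIMIT 1")
+ //     .ordered()
+ //     .baselineColumns("employee_id")
+ //     .baselineValues(1L)
+ //     .go();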
+
+ @AfterClass
+ public static void closeClient() throws Exception {
+ if (client != null) {
+ client.close();
+ }
+
+ if (bits != null) {
+ for(final Drillbit bit : bits) {
+ if (bit != null) {
+ bit.close();
+ }
+ }
+ }
+
+ if(serviceSet != null) {
+ serviceSet.close();
+ }
+ if (allocator != null) {
+ allocator.close();
+ }
+ }
+
+ @AfterClass
+ public static void resetDrillbitCount() {
+ // Some test classes assume this value to be 1 and will fail if run alongside other tests that increase it.
+ drillbitCount = 1;
+ }
+
+ protected static void runSQL(String sql) throws Exception {
+ final AwaitableUserResultsListener listener = new AwaitableUserResultsListener(new SilentListener());
+ testWithListener(QueryType.SQL, sql, listener);
+ listener.await();
+ }
+
+ protected static List<QueryDataBatch> testSqlWithResults(String sql) throws Exception{
+ return testRunAndReturn(QueryType.SQL, sql);
+ }
+
+ protected static List<QueryDataBatch> testLogicalWithResults(String logical) throws Exception{
+ return testRunAndReturn(QueryType.LOGICAL, logical);
+ }
+
+ protected static List<QueryDataBatch> testPhysicalWithResults(String physical) throws Exception{
+ return testRunAndReturn(QueryType.PHYSICAL, physical);
+ }
+
+ public static List<QueryDataBatch> testRunAndReturn(QueryType type, Object query) throws Exception{
+ if (type == QueryType.PREPARED_STATEMENT) {
+ Preconditions.checkArgument(query instanceof PreparedStatementHandle,
+ "Expected an instance of PreparedStatement as input query");
+ return testPreparedStatement((PreparedStatementHandle)query);
+ } else {
+ Preconditions.checkArgument(query instanceof String, "Expected a string as input query");
+ query = QueryTestUtil.normalizeQuery((String)query);
+ return client.runQuery(type, (String)query);
+ }
+ }
+
+ public static List<QueryDataBatch> testPreparedStatement(PreparedStatementHandle handle) throws Exception {
+ return client.executePreparedStatement(handle);
+ }
+
+ public static int testRunAndPrint(final QueryType type, final String query) throws Exception {
+ return QueryTestUtil.testRunAndPrint(client, type, query);
+ }
+
+ protected static void testWithListener(QueryType type, String query, UserResultsListener resultListener) {
+ QueryTestUtil.testWithListener(client, type, query, resultListener);
+ }
+
+ public static void testNoResult(String query, Object... args) throws Exception {
+ testNoResult(1, query, args);
+ }
+
+ public static void alterSession(String option, Object value) {
+ String valueStr = ClusterFixture.stringify(value);
+ try {
+ test("ALTER SESSION SET `%s` = %s", option, valueStr);
+ } catch(final Exception e) {
+ fail(String.format("Failed to set session option `%s` = %s, Error: %s",
+ option, valueStr, e.toString()));
+ }
+ }
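+
+ // Typical pairing (option chosen for illustration): override an option for one test
+ // and restore the default afterwards:
+ //
+ //   alterSession(ExecConstants.SLICE_TARGET, 10);
+ //   try {
+ //     // run queries sensitive to the option here
+ //   } finally {
+ //     resetSessionOption(ExecConstants.SLICE_TARGET);
+ //   }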
+
+ public static void resetSessionOption(String option) {
+ try {
+ test("ALTER SESSION RESET `%s`", option);
+ } catch(final Exception e) {
+ fail(String.format("Failed to reset session option `%s`, Error: %s",
+ option, e.toString()));
+ }
+ }
+
+ protected static void testNoResult(int iterations, String query, Object... args) throws Exception {
+ query = String.format(query, args);
+ logger.debug("Running query:\n--------------\n" + query);
+ for (int i = 0; i < iterations; i++) {
+ final List<QueryDataBatch> results = client.runQuery(QueryType.SQL, query);
+ for (final QueryDataBatch queryDataBatch : results) {
+ queryDataBatch.release();
+ }
+ }
+ }
+
+ public static void test(String query, Object... args) throws Exception {
+ QueryTestUtil.test(client, String.format(query, args));
+ }
+
+ public static void test(final String query) throws Exception {
+ QueryTestUtil.test(client, query);
+ }
+
+ protected static int testPhysical(String query) throws Exception{
+ return testRunAndPrint(QueryType.PHYSICAL, query);
+ }
+
+ protected static int testSql(String query) throws Exception{
+ return testRunAndPrint(QueryType.SQL, query);
+ }
+
+ protected static void testPhysicalFromFile(String file) throws Exception{
+ testPhysical(getFile(file));
+ }
+
+ /**
+ * Utility method which tests that the given query produces a {@link UserException} and that the
+ * exception message contains the given expected message.
+ * @param testSqlQuery Test query
+ * @param expectedErrorMsg Expected error message.
+ */
+ protected static void errorMsgTestHelper(final String testSqlQuery, final String expectedErrorMsg) throws Exception {
+ try {
+ test(testSqlQuery);
+ fail("Expected a UserException when running " + testSqlQuery);
+ } catch (final UserException actualException) {
+ try {
+ assertThat("message of UserException when running " + testSqlQuery, actualException.getMessage(), containsString(expectedErrorMsg));
+ } catch (AssertionError e) {
+ e.addSuppressed(actualException);
+ throw e;
+ }
+ }
+ }
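+
+ // Example (the query is deliberately invalid and the expected message fragment is
+ // illustrative, not the exact Drill error text):
+ //
+ //   errorMsgTestHelper("SELECT CAST('abc' AS INT) FROM (VALUES(1))", "cast");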
+
+ /**
+ * Utility method which tests that the given query produces a {@link UserException}
+ * whose {@link org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType} is
+ * DrillPBError.ErrorType.PARSE.
+ * @param testSqlQuery Test query
+ */
+ protected static void parseErrorHelper(final String testSqlQuery) throws Exception {
+ errorMsgTestHelper(testSqlQuery, UserBitShared.DrillPBError.ErrorType.PARSE.name());
+ }
+
+ public static String getFile(String resource) throws IOException{
+ final URL url = Resources.getResource(resource);
+ if (url == null) {
+ throw new IOException(String.format("Unable to find path %s.", resource));
+ }
+ return Resources.toString(url, Charsets.UTF_8);
+ }
+
+ /**
+ * Copy the resource (e.g. a file on the classpath) to a physical file on the file system.
+ * @param resource The classpath location of the resource to copy.
+ * @return the path of the created file
+ * @throws IOException If the resource cannot be read or the temp file cannot be written.
+ */
+ public static String getPhysicalFileFromResource(final String resource) throws IOException {
+ final File file = File.createTempFile("tempfile", ".txt");
+ file.deleteOnExit();
+ final PrintWriter printWriter = new PrintWriter(file);
+ printWriter.write(BaseTestQuery.getFile(resource));
+ printWriter.close();
+
+ return file.getPath();
+ }
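+
+ // Sketch (resource name assumed to exist on the test classpath): materialize a
+ // resource so a query can reference it by an absolute file path:
+ //
+ //   String path = getPhysicalFileFromResource("queries/simple_plan.json");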
+
+ protected static void setSessionOption(final String option, final boolean value) {
+ alterSession(option, value);
+ }
+
+ protected static void setSessionOption(final String option, final long value) {
+ alterSession(option, value);
+ }
+
+ protected static void setSessionOption(final String option, final double value) {
+ alterSession(option, value);
+ }
+
+ protected static void setSessionOption(final String option, final String value) {
+ alterSession(option, value);
+ }
+
+ public static class SilentListener implements UserResultsListener {
+ private final AtomicInteger count = new AtomicInteger();
+
+ @Override
+ public void submissionFailed(UserException ex) {
+ logger.debug("Query failed: " + ex.getMessage());
+ }
+
+ @Override
+ public void queryCompleted(QueryState state) {
+ logger.debug("Query completed successfully with row count: " + count.get());
+ }
+
+ @Override
+ public void dataArrived(QueryDataBatch result, ConnectionThrottle throttle) {
+ final int rows = result.getHeader().getRowCount();
+ if (result.getData() != null) {
+ count.addAndGet(rows);
+ }
+ result.release();
+ }
+
+ @Override
+ public void queryIdArrived(QueryId queryId) {}
+
+ }
+
+ protected void setColumnWidth(int columnWidth) {
+ this.columnWidths = new int[] { columnWidth };
+ }
+
+ protected void setColumnWidths(int[] columnWidths) {
+ this.columnWidths = columnWidths;
+ }
+
+ protected int printResult(List<QueryDataBatch> results) throws SchemaChangeException {
+ int rowCount = 0;
+ final RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
+ for(final QueryDataBatch result : results) {
+ rowCount += result.getHeader().getRowCount();
+ loader.load(result.getHeader().getDef(), result.getData());
+ // TODO: Clean: DRILL-2933: That load(...) no longer throws
+ // SchemaChangeException, so check/clean throw clause above.
+ VectorUtil.showVectorAccessibleContent(loader, columnWidths);
+ loader.clear();
+ result.release();
+ }
+ System.out.println("Total record count: " + rowCount);
+ return rowCount;
+ }
+
+ protected static String getResultString(List<QueryDataBatch> results, String delimiter)
+ throws SchemaChangeException {
+ final StringBuilder formattedResults = new StringBuilder();
+ boolean includeHeader = true;
+ final RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
+ for(final QueryDataBatch result : results) {
+ loader.load(result.getHeader().getDef(), result.getData());
+ if (loader.getRecordCount() <= 0) {
+ continue;
+ }
+ VectorUtil.appendVectorAccessibleContent(loader, formattedResults, delimiter, includeHeader);
+ // Only include the header once, for the first non-empty batch.
+ includeHeader = false;
+ loader.clear();
+ result.release();
+ }
+
+ return formattedResults.toString();
+ }
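+
+ // Sketch of use (query invented): flatten a small result set into a delimited string
+ // for a coarse comparison:
+ //
+ //   List<QueryDataBatch> batches = testSqlWithResults("SELECT full_name FROM cp.`employee.json` LIMIT 2");
+ //   String csv = getResultString(batches, ",");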
+
+ public class TestResultSet {
+
+ private final List<List<String>> rows;
+
+ public TestResultSet() {
+ rows = new ArrayList<>();
+ }
+
+ public TestResultSet(List<QueryDataBatch> batches) throws SchemaChangeException {
+ rows = new ArrayList<>();
+ convert(batches);
+ }
+
+ public void addRow(String... cells) {
+ List<String> newRow = Arrays.asList(cells);
+ rows.add(newRow);
+ }
+
+ public int size() {
+ return rows.size();
+ }
+
+ // Note: intended for use in test assertions; mismatches trigger assertEquals()
+ // failures (an AssertionError) rather than returning false.
+ @Override public boolean equals(Object o) {
+ boolean result = false;
+
+ if (this == o) {
+ result = true;
+ } else if (o instanceof TestResultSet) {
+ TestResultSet that = (TestResultSet) o;
+ assertEquals(this.size(), that.size());
+ for (int i = 0; i < this.rows.size(); i++) {
+ assertEquals(this.rows.get(i).size(), that.rows.get(i).size());
+ for (int j = 0; j < this.rows.get(i).size(); ++j) {
+ assertEquals(this.rows.get(i).get(j), that.rows.get(i).get(j));
+ }
+ }
+ result = true;
+ }
+
+ return result;
+ }
+
+ private void convert(List<QueryDataBatch> batches) throws SchemaChangeException {
+ RecordBatchLoader loader = new RecordBatchLoader(getAllocator());
+ for (QueryDataBatch batch : batches) {
+ int rc = batch.getHeader().getRowCount();
+ if (batch.getData() != null) {
+ loader.load(batch.getHeader().getDef(), batch.getData());
+ for (int i = 0; i < rc; ++i) {
+ List<String> newRow = new ArrayList<>();
+ rows.add(newRow);
+ for (VectorWrapper<?> vw : loader) {
+ ValueVector.Accessor accessor = vw.getValueVector().getAccessor();
+ Object o = accessor.getObject(i);
+ newRow.add(o == null ? null : o.toString());
+ }
+ }
+ }
+ loader.clear();
+ batch.release();
+ }
+ }
+ }
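+
+ // Sketch (baseline values taken from the cp.`employee.json` sample data, so treat
+ // them as illustrative): compare actual batches against a hand-built baseline:
+ //
+ //   TestResultSet expected = new TestResultSet();
+ //   expected.addRow("1", "Sheri Nowmer");
+ //   TestResultSet actual = new TestResultSet(
+ //       testSqlWithResults("SELECT employee_id, full_name FROM cp.`employee.json` LIMIT 1"));
+ //   assertTrue(expected.equals(actual));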
+ }
http://git-wip-us.apache.org/repos/asf/drill/blob/acc5ed92/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java
index a80d4f6..12be961 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClientFixture.java
@@ -27,8 +27,6 @@ import java.io.StringReader;
import java.util.List;
import java.util.Properties;
-import org.apache.drill.QueryTestUtil;
-import org.apache.drill.TestBuilder;
import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.client.DrillClient;
import org.apache.drill.exec.memory.BufferAllocator;
@@ -341,7 +339,7 @@ public class ClientFixture implements AutoCloseable {
/**
* Execute a set of statements from a file.
- * @param stmts the set of statements, separated by semicolons
+ * @param source the set of statements, separated by semicolons
* @return the number of statements executed
*/
|