This is an automated email from the ASF dual-hosted git repository.
vitalii pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/drill.git
commit aa127b70b1e46f7f4aa19881f25eda583627830a
Author: Arina Ielchiieva <arina.yelchiyeva@gmail.com>
AuthorDate: Thu Jun 21 17:04:13 2018 +0300
DRILL-6523: Fix NPE for describe of partial schema
closes #1332
---
.../sql/handlers/DescribeSchemaHandler.java | 53 ++++++++++++----------
.../org/apache/drill/exec/sql/TestInfoSchema.java | 8 +++-
2 files changed, 37 insertions(+), 24 deletions(-)
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeSchemaHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeSchemaHandler.java
index f97696e..bb51ef0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeSchemaHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeSchemaHandler.java
@@ -32,10 +32,12 @@ import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.exec.physical.PhysicalPlan;
import org.apache.drill.exec.planner.sql.DirectPlan;
import org.apache.drill.exec.planner.sql.SchemaUtilites;
+import org.apache.drill.exec.store.AbstractSchema;
import org.apache.drill.exec.store.StoragePlugin;
import org.apache.drill.exec.store.dfs.FileSystemPlugin;
import org.apache.drill.exec.store.dfs.FileSystemSchemaFactory;
import org.apache.drill.exec.store.dfs.WorkspaceConfig;
+import org.apache.drill.exec.work.foreman.ForemanSetupException;
import java.util.List;
import java.util.Map;
@@ -68,33 +70,38 @@ public class DescribeSchemaHandler extends DefaultSqlHandler {
@Override
- public PhysicalPlan getPlan(SqlNode sqlNode) {
- SqlIdentifier schema = ((SqlDescribeSchema) sqlNode).getSchema();
- SchemaPlus drillSchema = SchemaUtilites.findSchema(config.getConverter().getDefaultSchema(), schema.names);
-
- if (drillSchema != null) {
- StoragePlugin storagePlugin;
- try {
- storagePlugin = context.getStorage().getPlugin(schema.names.get(0));
- } catch (ExecutionSetupException e) {
- throw new DrillRuntimeException("Failure while retrieving storage plugin", e);
+ public PhysicalPlan getPlan(SqlNode sqlNode) throws ForemanSetupException {
+ SqlIdentifier schema = unwrap(sqlNode, SqlDescribeSchema.class).getSchema();
+ SchemaPlus schemaPlus = SchemaUtilites.findSchema(config.getConverter().getDefaultSchema(), schema.names);
+
+ if (schemaPlus == null) {
+ throw UserException.validationError()
+ .message("Invalid schema name [%s]", Joiner.on(".").join(schema.names))
+ .build(logger);
+ }
+
+ StoragePlugin storagePlugin;
+ try {
+ AbstractSchema drillSchema = SchemaUtilites.unwrapAsDrillSchemaInstance(schemaPlus);
+ storagePlugin = context.getStorage().getPlugin(drillSchema.getSchemaPath().get(0));
+ if (storagePlugin == null) {
+ throw new DrillRuntimeException(String.format("Unable to find storage plugin with the following name [%s].",
+ drillSchema.getSchemaPath().get(0)));
}
- String properties;
- try {
- final Map configMap = mapper.convertValue(storagePlugin.getConfig(), Map.class);
- if (storagePlugin instanceof FileSystemPlugin) {
- transformWorkspaces(schema.names, configMap);
- }
- properties = mapper.writeValueAsString(configMap);
- } catch (JsonProcessingException e) {
- throw new DrillRuntimeException("Error while trying to convert storage config to json string", e);
+ } catch (ExecutionSetupException e) {
+ throw new DrillRuntimeException("Failure while retrieving storage plugin", e);
+ }
+
+ try {
+ Map configMap = mapper.convertValue(storagePlugin.getConfig(), Map.class);
+ if (storagePlugin instanceof FileSystemPlugin) {
+ transformWorkspaces(schema.names, configMap);
}
+ String properties = mapper.writeValueAsString(configMap);
return DirectPlan.createDirectPlan(context, new DescribeSchemaResult(Joiner.on(".").join(schema.names), properties));
+ } catch (JsonProcessingException e) {
+ throw new DrillRuntimeException("Error while trying to convert storage config to json
string", e);
}
-
- throw UserException.validationError()
- .message(String.format("Invalid schema name [%s]", Joiner.on(".").join(schema.names)))
- .build(logger);
}
/**
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchema.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchema.java
index a702574..e0ed2fb 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchema.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestInfoSchema.java
@@ -383,9 +383,15 @@ public class TestInfoSchema extends BaseTestQuery {
}
@Test
+ public void describePartialSchema() throws Exception {
+ test("use dfs");
+ test("describe schema tmp");
+ }
+
+ @Test
public void describeSchemaOutput() throws Exception {
final List<QueryDataBatch> result = testSqlWithResults("describe schema dfs.tmp");
- assertTrue(result.size() == 1);
+ assertEquals(1, result.size());
final QueryDataBatch batch = result.get(0);
final RecordBatchLoader loader = new RecordBatchLoader(getDrillbitContext().getAllocator());
loader.load(batch.getHeader().getDef(), batch.getData());