Repository: drill
Updated Branches:
refs/heads/master 711992f22 -> 8815eb7d9
DRILL-3319: Replaced UserException#build() method with #build(Logger) method to log from the correct class
+ Fixed docs in UserException
+ Created loggers, and changed logger visibility to private
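
For context, the change replaces the no-argument Builder#build() with Builder#build(Logger), so each call site passes its own class's logger and system errors are logged from the class that raised them rather than from UserException. A minimal usage sketch of the new calling pattern, mirroring the call sites in the diff below (the class name and context string here are hypothetical, not part of this commit):

    import org.apache.drill.common.exceptions.UserException;
    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    // Hypothetical caller illustrating the pattern introduced by this commit.
    public class ExampleScanOperator {
      // Each class now declares its own private logger (per the commit message above).
      private static final Logger logger = LoggerFactory.getLogger(ExampleScanOperator.class);

      void failOnMemory(final Exception cause) {
        // Before this commit: UserException.memoryError(cause)...build();
        // After: the caller's logger is passed in, so the error is logged from this class.
        throw UserException.memoryError(cause)
            .addContext("Fragment", "0:0")
            .build(logger);
      }
    }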
Project: http://git-wip-us.apache.org/repos/asf/drill/repo
Commit: http://git-wip-us.apache.org/repos/asf/drill/commit/8815eb7d
Tree: http://git-wip-us.apache.org/repos/asf/drill/tree/8815eb7d
Diff: http://git-wip-us.apache.org/repos/asf/drill/diff/8815eb7d
Branch: refs/heads/master
Commit: 8815eb7d947be6d2a0281c15a3a60d8ba040db95
Parents: 711992f
Author: Sudheesh Katkam <skatkam@maprtech.com>
Authored: Mon Jun 22 13:41:01 2015 -0700
Committer: Parth Chandra <parthc@apache.org>
Committed: Tue Jun 23 12:01:26 2015 -0700
----------------------------------------------------------------------
.../drill/common/exceptions/UserException.java | 67 ++++++++++----------
.../common/exceptions/TestUserException.java | 19 +++---
.../drill/exec/store/hive/HiveRecordReader.java | 2 +-
.../src/main/codegen/templates/ListWriters.java | 9 +--
.../exec/client/PrintingResultsListener.java | 4 +-
.../expr/fn/impl/AggregateErrorFunctions.java | 29 ++++-----
.../apache/drill/exec/ops/FragmentContext.java | 2 +-
.../drill/exec/ops/ViewExpansionContext.java | 2 +-
.../drill/exec/physical/impl/ScanBatch.java | 2 +-
.../physical/impl/aggregate/HashAggBatch.java | 2 +-
.../impl/aggregate/StreamingAggBatch.java | 2 +-
.../impl/flatten/FlattenRecordBatch.java | 2 +-
.../physical/impl/xsort/ExternalSortBatch.java | 2 +-
.../drill/exec/planner/logical/DrillOptiq.java | 4 +-
.../drill/exec/planner/sql/DrillSqlWorker.java | 13 ++--
.../drill/exec/planner/sql/SchemaUtilites.java | 9 +--
.../sql/handlers/CreateTableHandler.java | 14 ++--
.../planner/sql/handlers/DefaultSqlHandler.java | 23 +++----
.../sql/handlers/DescribeTableHandler.java | 7 +-
.../planner/sql/handlers/ExplainHandler.java | 10 +--
.../planner/sql/handlers/ShowFileHandler.java | 6 +-
.../planner/sql/handlers/ShowTablesHandler.java | 3 +-
.../planner/sql/handlers/SqlHandlerUtil.java | 7 +-
.../exec/planner/sql/handlers/ViewHandler.java | 10 +--
.../drill/exec/record/AbstractRecordBatch.java | 3 +-
.../org/apache/drill/exec/rpc/BasicServer.java | 2 +-
.../java/org/apache/drill/exec/rpc/RpcBus.java | 2 +-
.../drill/exec/rpc/user/QueryResultHandler.java | 12 ++--
.../apache/drill/exec/store/AbstractSchema.java | 6 +-
.../apache/drill/exec/store/TimedRunnable.java | 2 +-
.../exec/store/dfs/WorkspaceSchemaFactory.java | 23 +++----
.../exec/store/easy/json/JSONRecordReader.java | 2 +-
.../store/easy/text/compliant/TextReader.java | 2 +-
.../store/parquet/ParquetReaderUtility.java | 9 +--
.../exec/vector/complex/fn/JsonReader.java | 24 +++----
.../apache/drill/exec/work/foreman/Foreman.java | 12 ++--
.../exec/work/fragment/FragmentExecutor.java | 4 +-
.../store/parquet/ParquetResultListener.java | 2 +-
38 files changed, 183 insertions(+), 172 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/common/src/main/java/org/apache/drill/common/exceptions/UserException.java
----------------------------------------------------------------------
diff --git a/common/src/main/java/org/apache/drill/common/exceptions/UserException.java b/common/src/main/java/org/apache/drill/common/exceptions/UserException.java
index 6f28a2b..13c17bd 100644
--- a/common/src/main/java/org/apache/drill/common/exceptions/UserException.java
+++ b/common/src/main/java/org/apache/drill/common/exceptions/UserException.java
@@ -20,6 +20,7 @@ package org.apache.drill.common.exceptions;
import org.apache.drill.exec.proto.CoordinationProtos;
import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;
import org.apache.drill.exec.proto.UserBitShared.DrillPBError;
+import org.slf4j.Logger;
/**
* Base class for all user exception. The goal is to separate out common error conditions where we can give users
@@ -27,7 +28,7 @@ import org.apache.drill.exec.proto.UserBitShared.DrillPBError;
* <p>Throwing a user exception will guarantee it's message will be displayed to the user, along with any context
* information added to the exception at various levels while being sent to the client.
* <p>A specific class of user exceptions are system exception. They represent system level errors that don't display
- * any specific error message to the user apart from "A system error has occurend" along with informations to retrieve
+ * any specific error message to the user apart from "A system error has occurred" along with information to retrieve
* the details of the exception from the logs.
* <p>Although system exception should only display a generic message to the user, for now they will display the root
* error message, until all user errors are properly sent from the server side.
@@ -37,7 +38,6 @@ import org.apache.drill.exec.proto.UserBitShared.DrillPBError;
* @see org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType
*/
public class UserException extends DrillRuntimeException {
- private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(UserException.class);
public static final String MEMORY_ERROR_MSG = "One or more nodes ran out of memory while executing the query.";
@@ -64,8 +64,8 @@ public class UserException extends DrillRuntimeException {
/**
* Wraps the passed exception inside a system error.
* <p>The cause message will be used unless {@link Builder#message(String, Object...)} is called.
- * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build()} instead
- * of creating a new exception. Any added context will be added to the user exception as well.
+ * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build(Logger)}
+ * instead of creating a new exception. Any added context will be added to the user exception as well.
*
* @see org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType#SYSTEM
*
@@ -94,8 +94,8 @@ public class UserException extends DrillRuntimeException {
/**
* Wraps the passed exception inside a connection error.
* <p>The cause message will be used unless {@link Builder#message(String, Object...)} is called.
- * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build()} instead
- * of creating a new exception. Any added context will be added to the user exception as well.
+ * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build(Logger)}
+ * instead of creating a new exception. Any added context will be added to the user exception as well.
*
* @see org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType#CONNECTION
*
@@ -120,8 +120,8 @@ public class UserException extends DrillRuntimeException {
/**
* Wraps the passed exception inside a data read error.
* <p>The cause message will be used unless {@link Builder#message(String, Object...)} is called.
- * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build()} instead
- * of creating a new exception. Any added context will be added to the user exception as well.
+ * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build(Logger)}
+ * instead of creating a new exception. Any added context will be added to the user exception as well.
*
* @see org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType#DATA_READ
*
@@ -146,8 +146,8 @@ public class UserException extends DrillRuntimeException {
/**
* Wraps the passed exception inside a data write error.
* <p>The cause message will be used unless {@link Builder#message(String, Object...)} is called.
- * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build()} instead
- * of creating a new exception. Any added context will be added to the user exception as well.
+ * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build(Logger)}
+ * instead of creating a new exception. Any added context will be added to the user exception as well.
*
* @see org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType#DATA_WRITE
*
@@ -172,8 +172,8 @@ public class UserException extends DrillRuntimeException {
/**
* Wraps the passed exception inside a function error.
* <p>The cause message will be used unless {@link Builder#message(String, Object...)} is called.
- * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build()} instead
- * of creating a new exception. Any added context will be added to the user exception as well.
+ * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build(Logger)}
+ * instead of creating a new exception. Any added context will be added to the user exception as well.
*
* @see org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType#FUNCTION
*
@@ -196,10 +196,10 @@ public class UserException extends DrillRuntimeException {
}
/**
- * Wraps the passed exception inside a system error.
+ * Wraps the passed exception inside a parse error.
* <p>The cause message will be used unless {@link Builder#message(String, Object...)} is called.
- * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build()} instead
- * of creating a new exception. Any added context will be added to the user exception as well.
+ * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build(Logger)}
+ * instead of creating a new exception. Any added context will be added to the user exception as well.
*
* @see org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType#PARSE
*
@@ -222,10 +222,10 @@ public class UserException extends DrillRuntimeException {
}
/**
- * wraps the passed exception inside a system error.
+ * wraps the passed exception inside a validation error.
* <p>the cause message will be used unless {@link Builder#message(String, Object...)} is called.
- * <p>if the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build()} instead
- * of creating a new exception. Any added context will be added to the user exception as well.
+ * <p>if the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build(Logger)}
+ * instead of creating a new exception. Any added context will be added to the user exception as well.
*
* @see org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType#VALIDATION
*
@@ -248,10 +248,10 @@ public class UserException extends DrillRuntimeException {
}
/**
- * Wraps the passed exception inside a system error.
+ * Wraps the passed exception inside a permission error.
* <p>The cause message will be used unless {@link Builder#message(String, Object...)} is called.
- * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build()} instead
- * of creating a new exception. Any added context will be added to the user exception as well.
+ * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build(Logger)}
+ * instead of creating a new exception. Any added context will be added to the user exception as well.
*
* @see org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType#PERMISSION
*
@@ -274,10 +274,10 @@ public class UserException extends DrillRuntimeException {
}
/**
- * Wraps the passed exception inside a system error.
+ * Wraps the passed exception inside a plan error.
* <p>The cause message will be used unless {@link Builder#message(String, Object...)} is called.
- * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build()} instead
- * of creating a new exception. Any added context will be added to the user exception as well.
+ * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build(Logger)}
+ * instead of creating a new exception. Any added context will be added to the user exception as well.
*
* @see org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType#PLAN
*
@@ -300,10 +300,10 @@ public class UserException extends DrillRuntimeException {
}
/**
- * Wraps the passed exception inside a system error.
+ * Wraps the passed exception inside a resource error.
* <p>The cause message will be used unless {@link Builder#message(String, Object...)} is called.
- * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build()} instead
- * of creating a new exception. Any added context will be added to the user exception as well.
+ * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build(Logger)}
+ * instead of creating a new exception. Any added context will be added to the user exception as well.
*
* @see org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType#RESOURCE
*
@@ -326,10 +326,10 @@ public class UserException extends DrillRuntimeException {
}
/**
- * Wraps the passed exception inside a system error.
+ * Wraps the passed exception inside an unsupported error.
* <p>The cause message will be used unless {@link Builder#message(String, Object...)} is called.
- * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build()} instead
- * of creating a new exception. Any added context will be added to the user exception as well.
+ * <p>If the wrapped exception is, or wraps, a user exception it will be returned by {@link Builder#build(Logger)}
+ * instead of creating a new exception. Any added context will be added to the user exception as well.
*
* @see org.apache.drill.exec.proto.UserBitShared.DrillPBError.ErrorType#UNSUPPORTED_OPERATION
*
@@ -502,12 +502,13 @@ public class UserException extends DrillRuntimeException {
}
/**
- * builds a user exception or returns the wrapped one.
+ * builds a user exception or returns the wrapped one. If the error is a system error, the error message is logged
+ * to the given {@link Logger}.
*
+ * @param logger the logger to write to
* @return user exception
*/
- public UserException build() {
-
+ public UserException build(final Logger logger) {
if (uex != null) {
return uex;
}
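
The rest of the new build(Logger) body is truncated in this hunk; a rough sketch of the behavior described by the javadoc above (the constructor call and the errorType check are assumptions for illustration, not the committed code):

    public UserException build(final Logger logger) {
      if (uex != null) {
        return uex; // a wrapped user exception is returned unchanged (shown in the hunk above)
      }
      // Assumed remainder: build the new exception, then log system errors to the
      // caller-supplied logger so the log entry is attributed to the calling class.
      final UserException newException = new UserException(this); // assumed builder constructor
      if (errorType == DrillPBError.ErrorType.SYSTEM) {           // 'errorType' is an assumed Builder field
        logger.error(newException.getMessage(), newException);
      }
      return newException;
    }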
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/common/src/test/java/org/apache/drill/common/exceptions/TestUserException.java
----------------------------------------------------------------------
diff --git a/common/src/test/java/org/apache/drill/common/exceptions/TestUserException.java b/common/src/test/java/org/apache/drill/common/exceptions/TestUserException.java
index 151b762..8531b4f 100644
--- a/common/src/test/java/org/apache/drill/common/exceptions/TestUserException.java
+++ b/common/src/test/java/org/apache/drill/common/exceptions/TestUserException.java
@@ -26,6 +26,7 @@ import org.junit.Test;
* Test various use cases when creating user exceptions
*/
public class TestUserException {
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestUserException.class);
private Exception wrap(UserException uex, int numWraps) {
Exception ex = uex;
@@ -40,7 +41,7 @@ public class TestUserException {
@Test
public void testBuildSystemException() {
String message = "This is an exception";
- UserException uex = UserException.systemError(new Exception(new RuntimeException(message))).build();
+ UserException uex = UserException.systemError(new Exception(new RuntimeException(message))).build(logger);
Assert.assertTrue(uex.getOriginalMessage().contains(message));
Assert.assertTrue(uex.getOriginalMessage().contains("RuntimeException"));
@@ -54,7 +55,7 @@ public class TestUserException {
public void testBuildUserExceptionWithMessage() {
String message = "Test message";
- UserException uex = UserException.dataWriteError().message(message).build();
+ UserException uex = UserException.dataWriteError().message(message).build(logger);
DrillPBError error = uex.getOrCreatePBError(false);
Assert.assertEquals(ErrorType.DATA_WRITE, error.getErrorType());
@@ -65,7 +66,7 @@ public class TestUserException {
public void testBuildUserExceptionWithCause() {
String message = "Test message";
- UserException uex = UserException.dataWriteError(new RuntimeException(message)).build();
+ UserException uex = UserException.dataWriteError(new RuntimeException(message)).build(logger);
DrillPBError error = uex.getOrCreatePBError(false);
// cause message should be used
@@ -78,7 +79,7 @@ public class TestUserException {
String messageA = "Test message A";
String messageB = "Test message B";
- UserException uex = UserException.dataWriteError(new RuntimeException(messageA)).message(messageB).build();
+ UserException uex = UserException.dataWriteError(new RuntimeException(messageA)).message(messageB).build(logger);
DrillPBError error = uex.getOrCreatePBError(false);
// passed message should override the cause message
@@ -92,8 +93,8 @@ public class TestUserException {
String messageA = "Test message A";
String messageB = "Test message B";
- UserException original = UserException.connectionError().message(messageA).build();
- UserException uex = UserException.dataWriteError(wrap(original, 5)).message(messageB).build();
+ UserException original = UserException.connectionError().message(messageA).build(logger);
+ UserException uex = UserException.dataWriteError(wrap(original, 5)).message(messageB).build(logger);
//builder should return the unwrapped original user exception and not build a new one
Assert.assertEquals(original, uex);
@@ -107,7 +108,7 @@ public class TestUserException {
public void testBuildUserExceptionWithFormattedMessage() {
String format = "This is test #%d";
- UserException uex = UserException.connectionError().message(format, 5).build();
+ UserException uex = UserException.connectionError().message(format, 5).build(logger);
DrillPBError error = uex.getOrCreatePBError(false);
Assert.assertEquals(ErrorType.CONNECTION, error.getErrorType());
@@ -117,10 +118,10 @@ public class TestUserException {
// make sure wrapped user exceptions are retrieved properly when calling ErrorHelper.wrap()
@Test
public void testWrapUserException() {
- UserException uex = UserException.dataReadError().message("this is a data read exception").build();
+ UserException uex = UserException.dataReadError().message("this is a data read exception").build(logger);
Exception wrapped = wrap(uex, 3);
- Assert.assertEquals(uex, UserException.systemError(wrapped).build());
+ Assert.assertEquals(uex, UserException.systemError(wrapped).build(logger));
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
----------------------------------------------------------------------
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
index 9f63e05..088fb74 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java
@@ -364,7 +364,7 @@ public class HiveRecordReader extends AbstractRecordReader {
if (context.getOptions().getOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY).bool_val == false) {
throw UserException.unsupportedError()
.message(ExecErrorConstants.DECIMAL_DISABLE_ERR_MSG)
- .build();
+ .build(logger);
}
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo) primitiveTypeInfo;
return DecimalUtility.getDecimalDataType(decimalTypeInfo.precision());
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/codegen/templates/ListWriters.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/codegen/templates/ListWriters.java b/exec/java-exec/src/main/codegen/templates/ListWriters.java
index ab78603..6d26204 100644
--- a/exec/java-exec/src/main/codegen/templates/ListWriters.java
+++ b/exec/java-exec/src/main/codegen/templates/ListWriters.java
@@ -39,7 +39,8 @@ package org.apache.drill.exec.vector.complex.impl;
/* This class is generated using freemarker and the ListWriters.java template */
@SuppressWarnings("unused")
public class ${mode}ListWriter extends AbstractFieldWriter{
-
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(${mode}ListWriter.class);
+
static enum Mode { INIT, IN_MAP, IN_LIST <#list vv.types as type><#list type.minor as minor>, IN_${minor.class?upper_case}</#list></#list> }
private final String name;
@@ -98,7 +99,7 @@ public class ${mode}ListWriter extends AbstractFieldWriter{
return writer;
}
- throw UserException.unsupportedError().message(getUnsupportedErrorMsg("MAP", mode.name())).build();
+ throw UserException.unsupportedError().message(getUnsupportedErrorMsg("MAP", mode.name())).build(logger);
}
@@ -117,7 +118,7 @@ public class ${mode}ListWriter extends AbstractFieldWriter{
return writer;
}
- throw UserException.unsupportedError().message(getUnsupportedErrorMsg("LIST", mode.name())).build();
+ throw UserException.unsupportedError().message(getUnsupportedErrorMsg("LIST", mode.name())).build(logger);
}
@@ -144,7 +145,7 @@ public class ${mode}ListWriter extends AbstractFieldWriter{
return writer;
}
- throw UserException.unsupportedError().message(getUnsupportedErrorMsg("${upperName}", mode.name())).build();
+ throw UserException.unsupportedError().message(getUnsupportedErrorMsg("${upperName}", mode.name())).build(logger);
}
</#list></#list>
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/client/PrintingResultsListener.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/client/PrintingResultsListener.java b/exec/java-exec/src/main/java/org/apache/drill/exec/client/PrintingResultsListener.java
index f5a119d..8cb4a0b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/client/PrintingResultsListener.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/client/PrintingResultsListener.java
@@ -41,6 +41,8 @@ import org.apache.drill.exec.util.VectorUtil;
import com.google.common.base.Stopwatch;
public class PrintingResultsListener implements UserResultsListener {
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PrintingResultsListener.class);
+
AtomicInteger count = new AtomicInteger();
private CountDownLatch latch = new CountDownLatch(1);
RecordBatchLoader loader;
@@ -86,7 +88,7 @@ public class PrintingResultsListener implements UserResultsListener {
// TODO: Clean: DRILL-2933: That load(...) no longer throws
// SchemaChangeException, so check/clean catch clause below.
} catch (SchemaChangeException e) {
- submissionFailed(UserException.systemError(e).build());
+ submissionFailed(UserException.systemError(e).build(logger));
}
switch(format) {
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/AggregateErrorFunctions.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/AggregateErrorFunctions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/AggregateErrorFunctions.java
index 8161a43..0a9ef8d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/AggregateErrorFunctions.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/impl/AggregateErrorFunctions.java
@@ -34,6 +34,7 @@ import org.apache.drill.exec.expr.holders.VarCharHolder;
* to perform function resolution. Otherwise with implicit cast we will try to bind to an existing function.
*/
public class AggregateErrorFunctions {
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AggregateErrorFunctions.class);
@FunctionTemplate(names = {"sum", "max", "avg", "stddev_pop", "stddev_samp", "stddev", "var_pop",
"var_samp", "variance"}, scope = FunctionTemplate.FunctionScope.POINT_AGGREGATE)
@@ -45,10 +46,9 @@ public class AggregateErrorFunctions {
public void setup() {
if (true) {
- throw org.apache.drill.common.exceptions.UserException
- .unsupportedError()
- .message("Only COUNT aggregate function supported for Boolean type")
- .build();
+ throw org.apache.drill.common.exceptions.UserException.unsupportedError()
+ .message("Only COUNT aggregate function supported for Boolean type")
+ .build(logger);
}
}
@@ -76,10 +76,9 @@ public class AggregateErrorFunctions {
public void setup() {
if (true) {
- throw org.apache.drill.common.exceptions.UserException
- .unsupportedError()
- .message("Only COUNT aggregate function supported for Boolean type")
- .build();
+ throw org.apache.drill.common.exceptions.UserException.unsupportedError()
+ .message("Only COUNT aggregate function supported for Boolean type")
+ .build(logger);
}
}
@@ -107,10 +106,9 @@ public class AggregateErrorFunctions {
public void setup() {
if (true) {
- throw org.apache.drill.common.exceptions.UserException
- .unsupportedError()
- .message("Only COUNT, MIN and MAX aggregate functions supported for VarChar type")
- .build();
+ throw org.apache.drill.common.exceptions.UserException.unsupportedError()
+ .message("Only COUNT, MIN and MAX aggregate functions supported for VarChar type")
+ .build(logger);
}
}
@@ -138,10 +136,9 @@ public class AggregateErrorFunctions {
public void setup() {
if (true) {
- throw org.apache.drill.common.exceptions.UserException
- .unsupportedError()
- .message("Only COUNT, MIN and MAX aggregate functions supported for VarChar type")
- .build();
+ throw org.apache.drill.common.exceptions.UserException.unsupportedError()
+ .message("Only COUNT, MIN and MAX aggregate functions supported for VarChar type")
+ .build(logger);
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
index 1cbe886..d5e85ee 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/FragmentContext.java
@@ -162,7 +162,7 @@ public class FragmentContext implements AutoCloseable, UdfUtilities {
} catch(final OutOfMemoryException | OutOfMemoryRuntimeException e) {
throw UserException.memoryError(e)
.addContext("Fragment", getHandle().getMajorFragmentId() + ":" + getHandle().getMinorFragmentId())
- .build();
+ .build(logger);
} catch(final Throwable e) {
throw new ExecutionSetupException("Failure while getting memory allocator for fragment.", e);
}
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ViewExpansionContext.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ViewExpansionContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ViewExpansionContext.java
index 157d550..53dbfd2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ViewExpansionContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/ViewExpansionContext.java
@@ -108,7 +108,7 @@ public class ViewExpansionContext {
String.format("Cannot issue token for view expansion as issuing the token exceeds the " +
"maximum allowed number of user hops (%d) in chained impersonation.", maxChainedUserHops);
logger.error(errMsg);
- throw UserException.permissionError().message(errMsg).build();
+ throw UserException.permissionError().message(errMsg).build(logger);
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
index da73185..a0560a5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java
@@ -222,7 +222,7 @@ public class ScanBatch implements CloseableRecordBatch {
return IterOutcome.OK;
}
} catch (OutOfMemoryRuntimeException ex) {
- context.fail(UserException.memoryError(ex).build());
+ context.fail(UserException.memoryError(ex).build(logger));
return IterOutcome.STOP;
} catch (Exception ex) {
logger.debug("Failed to read the batch. Stopping...", ex);
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
index e1b5909..a033a8e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/HashAggBatch.java
@@ -141,7 +141,7 @@ public class HashAggBatch extends AbstractRecordBatch<HashAggregate> {
return aggregator.getOutcome();
case UPDATE_AGGREGATOR:
context.fail(UserException.unsupportedError()
- .message("Hash aggregate does not support schema changes").build());
+ .message("Hash aggregate does not support schema changes").build(logger));
close();
killIncoming(false);
return IterOutcome.STOP;
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
index b252971..5a26134 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/aggregate/StreamingAggBatch.java
@@ -188,7 +188,7 @@ public class StreamingAggBatch extends AbstractRecordBatch<StreamingAggregate> {
case UPDATE_AGGREGATOR:
context.fail(UserException.unsupportedError()
.message("Streaming aggregate does not support schema changes")
- .build());
+ .build(logger));
close();
killIncoming(false);
return IterOutcome.STOP;
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java
index 9991404..b8daceb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/flatten/FlattenRecordBatch.java
@@ -132,7 +132,7 @@ public class FlattenRecordBatch extends AbstractSingleRecordBatch<FlattenPOP> {
field.getValueClass(), typedFieldId.getFieldIds()).getValueVector());
flattener.setFlattenField(vector);
} catch (Exception ex) {
- throw UserException.unsupportedError(ex).message("Trying to flatten a non-repeated field.").build();
+ throw UserException.unsupportedError(ex).message("Trying to flatten a non-repeated field.").build(logger);
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
index 5ce63fb..02a1c08 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/xsort/ExternalSortBatch.java
@@ -427,7 +427,7 @@ public class ExternalSortBatch extends AbstractRecordBatch<ExternalSort> {
} catch (SchemaChangeException ex) {
kill(false);
context.fail(UserException.unsupportedError(ex)
- .message("Sort doesn't currently support sorts with changing schemas").build());
+ .message("Sort doesn't currently support sorts with changing schemas").build(logger));
return IterOutcome.STOP;
} catch(ClassTransformationException | IOException ex) {
kill(false);
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
index 8b95f0b..1b675bb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/DrillOptiq.java
@@ -210,7 +210,7 @@ public class DrillOptiq {
private LogicalExpression doUnknown(RexNode o){
// raise an error
throw UserException.planError().message(UNSUPPORTED_REX_NODE_ERROR +
- "RexNode Class: %s, RexNode Digest: %s", o.getClass().getName(), o.toString()).build();
+ "RexNode Class: %s, RexNode Digest: %s", o.getClass().getName(), o.toString()).build(logger);
}
@Override
public LogicalExpression visitLocalRef(RexLocalRef localRef) {
@@ -262,7 +262,7 @@ public class DrillOptiq {
throw UserException
.unsupportedError()
.message(ExecErrorConstants.DECIMAL_DISABLE_ERR_MSG)
- .build();
+ .build(logger);
}
int precision = call.getType().getPrecision();
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillSqlWorker.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillSqlWorker.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillSqlWorker.java
index 73aeec6..2d1bac2 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillSqlWorker.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillSqlWorker.java
@@ -66,7 +66,7 @@ import org.apache.drill.exec.work.foreman.SqlUnsupportedException;
import org.apache.hadoop.security.AccessControlException;
public class DrillSqlWorker {
-// private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillSqlWorker.class);
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillSqlWorker.class);
private static final ControlsInjector injector = ControlsInjectorFactory.getInjector(DrillSqlWorker.class);
private final Planner planner;
@@ -145,7 +145,7 @@ public class DrillSqlWorker {
injector.injectChecked(context.getExecutionControls(), "sql-parsing", ForemanSetupException.class);
sqlNode = planner.parse(sql);
} catch (SqlParseException e) {
- throw UserException.parseError(e).build();
+ throw UserException.parseError(e).build(logger);
}
AbstractSqlHandler handler;
@@ -178,12 +178,15 @@ public class DrillSqlWorker {
return handler.getPlan(sqlNode);
} catch(ValidationException e) {
String errorMessage = e.getCause() != null ? e.getCause().getMessage() : e.getMessage();
- throw UserException.parseError(e).message(errorMessage).build();
+ throw UserException.parseError(e)
+ .message(errorMessage)
+ .build(logger);
} catch (AccessControlException e) {
- throw UserException.permissionError(e).build();
+ throw UserException.permissionError(e)
+ .build(logger);
} catch(SqlUnsupportedException e) {
throw UserException.unsupportedError(e)
- .build();
+ .build(logger);
} catch (IOException | RelConversionException e) {
throw new QueryInputException("Failure handling SQL.", e);
}
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SchemaUtilites.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SchemaUtilites.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SchemaUtilites.java
index 655e135..b8acfcf 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SchemaUtilites.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/SchemaUtilites.java
@@ -29,6 +29,7 @@ import java.util.Collections;
import java.util.List;
public class SchemaUtilites {
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SchemaUtilites.class);
public static final Joiner SCHEMA_PATH_JOINER = Joiner.on(".").skipNulls();
/**
@@ -106,7 +107,7 @@ public class SchemaUtilites {
} catch (ClassCastException e) {
throw UserException.validationError(e)
.message("Schema [%s] is not a Drill schema.", getSchemaPath(schemaPlus))
- .build();
+ .build(logger);
}
}
@@ -140,7 +141,7 @@ public class SchemaUtilites {
givenSchemaPath)
.addContext("Current default schema: ",
isRootSchema(defaultSchema) ? "No default schema selected" : getSchemaPath(defaultSchema))
- .build();
+ .build(logger);
}
/**
@@ -164,14 +165,14 @@ public class SchemaUtilites {
throw UserException.parseError()
.message("Root schema is immutable. Creating or dropping tables/views is not allowed in root schema." +
"Select a schema using 'USE schema' command.")
- .build();
+ .build(logger);
}
final AbstractSchema drillSchema = unwrapAsDrillSchemaInstance(schema);
if (!drillSchema.isMutable()) {
throw UserException.parseError()
.message("Unable to create or drop tables/views. Schema [%s] is immutable.", getSchemaPath(schema))
- .build();
+ .build(logger);
}
return drillSchema;
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateTableHandler.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateTableHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateTableHandler.java
index 920b284..1e63748 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateTableHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateTableHandler.java
@@ -56,6 +56,8 @@ import org.apache.drill.exec.work.foreman.ForemanSetupException;
import org.apache.drill.exec.work.foreman.SqlUnsupportedException;
public class CreateTableHandler extends DefaultSqlHandler {
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(CreateTableHandler.class);
+
public CreateTableHandler(SqlHandlerConfig config, Pointer<String> textPlan) {
super(config, textPlan);
}
@@ -81,21 +83,21 @@ public class CreateTableHandler extends DefaultSqlHandler {
if (SqlHandlerUtil.getTableFromSchema(drillSchema, newTblName) != null) {
throw UserException.validationError()
.message("A table or view with given name [%s] already exists in schema [%s]", newTblName, schemaPath)
- .build();
+ .build(logger);
}
final RelNode newTblRelNodeWithPCol = SqlHandlerUtil.qualifyPartitionCol(newTblRelNode, sqlCreateTable.getPartitionColumns());
- log("Optiq Logical", newTblRelNodeWithPCol);
+ log("Optiq Logical", newTblRelNodeWithPCol, logger);
// Convert the query to Drill Logical plan and insert a writer operator on top.
DrillRel drel = convertToDrel(newTblRelNodeWithPCol, drillSchema, newTblName, sqlCreateTable.getPartitionColumns(), newTblRelNode.getRowType());
- log("Drill Logical", drel);
+ log("Drill Logical", drel, logger);
Prel prel = convertToPrel(drel, newTblRelNode.getRowType(), sqlCreateTable.getPartitionColumns());
- log("Drill Physical", prel);
+ log("Drill Physical", prel, logger);
PhysicalOperator pop = convertToPop(prel);
PhysicalPlan plan = convertToPlan(pop);
- log("Drill Plan", plan);
+ log("Drill Plan", plan, logger);
return plan;
}
@@ -186,7 +188,7 @@ public class CreateTableHandler extends DefaultSqlHandler {
if (field == null) {
throw UserException.validationError()
.message("Partition column %s is not in the SELECT list of CTAS!", colName)
- .build();
+ .build(logger);
}
partitionColumnExprs.add(RexInputRef.of(field.getIndex(), childRowType));
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java
index a2858b8..376fed1 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DefaultSqlHandler.java
@@ -101,9 +101,10 @@ import org.apache.drill.exec.work.foreman.UnsupportedRelOperatorException;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
+import org.slf4j.Logger;
public class DefaultSqlHandler extends AbstractSqlHandler {
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DefaultSqlHandler.class);
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DefaultSqlHandler.class);
protected final SqlHandlerConfig config;
protected final QueryContext context;
@@ -126,13 +127,13 @@ public class DefaultSqlHandler extends AbstractSqlHandler {
targetSliceSize = context.getOptions().getOption(ExecConstants.SLICE_TARGET).num_val;
}
- protected void log(String name, RelNode node) {
+ protected static void log(final String name, final RelNode node, final Logger logger) {
if (logger.isDebugEnabled()) {
logger.debug(name + " : \n" + RelOptUtil.toString(node, SqlExplainLevel.ALL_ATTRIBUTES));
}
}
- protected void log(String name, Prel node) {
+ protected void log(final String name, final Prel node, final Logger logger) {
String plan = PrelSequencer.printWithIds(node, SqlExplainLevel.ALL_ATTRIBUTES);
if(textPlan != null){
textPlan.value = plan;
@@ -143,7 +144,7 @@ public class DefaultSqlHandler extends AbstractSqlHandler {
}
}
- protected void log(String name, PhysicalPlan plan) throws JsonProcessingException {
+ protected void log(final String name, final PhysicalPlan plan, final Logger logger) throws JsonProcessingException {
if (logger.isDebugEnabled()) {
String planText = plan.unparse(context.getConfig().getMapper().writer());
logger.debug(name + " : \n" + planText);
@@ -157,15 +158,15 @@ public class DefaultSqlHandler extends AbstractSqlHandler {
final RelDataType validatedRowType = convertedRelNode.getValidatedRowType();
final RelNode queryRelNode = convertedRelNode.getConvertedNode();
- log("Optiq Logical", queryRelNode);
+ log("Optiq Logical", queryRelNode, logger);
DrillRel drel = convertToDrel(queryRelNode, validatedRowType);
- log("Drill Logical", drel);
+ log("Drill Logical", drel, logger);
Prel prel = convertToPrel(drel);
- log("Drill Physical", prel);
+ log("Drill Physical", prel, logger);
PhysicalOperator pop = convertToPop(prel);
PhysicalPlan plan = convertToPlan(pop);
- log("Drill Plan", plan);
+ log("Drill Plan", plan, logger);
return plan;
}
@@ -271,7 +272,7 @@ public class DefaultSqlHandler extends AbstractSqlHandler {
if (context.getPlannerSettings().isMemoryEstimationEnabled()
&& !MemoryEstimationVisitor.enoughMemory(phyRelNode, queryOptions, context.getActiveEndpoints().size())) {
- log("Not enough memory for this plan", phyRelNode);
+ log("Not enough memory for this plan", phyRelNode, logger);
logger.debug("Re-planning without hash operations.");
queryOptions.setOption(OptionValue.createBoolean(OptionValue.OptionType.QUERY, PlannerSettings.HASHJOIN.getOptionName(), false));
@@ -511,7 +512,7 @@ public class DefaultSqlHandler extends AbstractSqlHandler {
private RelNode logicalPlanningVolcanoAndLopt(RelNode relNode) throws RelConversionException, SqlUnsupportedException {
final RelNode convertedRelNode = planner.transform(DrillSqlWorker.LOGICAL_CONVERT_RULES, relNode.getTraitSet().plus(DrillRel.DRILL_LOGICAL), relNode);
- log("VolCalciteRel", convertedRelNode);
+ log("VolCalciteRel", convertedRelNode, logger);
final RelNode loptNode = getLoptJoinOrderTree(
convertedRelNode,
@@ -520,7 +521,7 @@ public class DefaultSqlHandler extends AbstractSqlHandler {
DrillRelFactories.DRILL_LOGICAL_FILTER_FACTORY,
DrillRelFactories.DRILL_LOGICAL_PROJECT_FACTORY);
- log("HepCalciteRel", loptNode);
+ log("HepCalciteRel", loptNode, logger);
return loptNode;
}
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeTableHandler.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeTableHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeTableHandler.java
index 676dcba..ba67971 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeTableHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DescribeTableHandler.java
@@ -44,6 +44,7 @@ import com.google.common.collect.ImmutableList;
import static org.apache.drill.exec.planner.sql.parser.DrillParserUtil.CHARSET;
public class DescribeTableHandler extends DefaultSqlHandler {
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DescribeTableHandler.class);
public DescribeTableHandler(SqlHandlerConfig config) { super(config); }
@@ -74,7 +75,7 @@ public class DescribeTableHandler extends DefaultSqlHandler {
if (SchemaUtilites.isRootSchema(schema)) {
throw UserException.validationError()
.message("No schema selected.")
- .build();
+ .build(logger);
}
final String tableName = Util.last(table.names);
@@ -85,7 +86,7 @@ public class DescribeTableHandler extends DefaultSqlHandler {
if (schema.getTable(tableName) == null) {
throw UserException.validationError()
.message("Unknown table [%s] in schema [%s]", tableName, schemaPath)
- .build();
+ .build(logger);
}
SqlNode schemaCondition = null;
@@ -125,7 +126,7 @@ public class DescribeTableHandler extends DefaultSqlHandler {
} catch (Exception ex) {
throw UserException.planError(ex)
.message("Error while rewriting DESCRIBE query: %d", ex.getMessage())
- .build();
+ .build(logger);
}
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ExplainHandler.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ExplainHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ExplainHandler.java
index efc4b36..ac9af9d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ExplainHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ExplainHandler.java
@@ -43,7 +43,7 @@ import org.apache.calcite.sql.SqlLiteral;
import org.apache.calcite.sql.SqlNode;
public class ExplainHandler extends DefaultSqlHandler {
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExplainHandler.class);
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ExplainHandler.class);
private ResultMode mode;
private SqlExplainLevel level = SqlExplainLevel.ALL_ATTRIBUTES;
@@ -57,9 +57,9 @@ public class ExplainHandler extends DefaultSqlHandler {
final RelDataType validatedRowType = convertedRelNode.getValidatedRowType();
final RelNode queryRelNode = convertedRelNode.getConvertedNode();
- log("Optiq Logical", queryRelNode);
+ log("Optiq Logical", queryRelNode, logger);
DrillRel drel = convertToDrel(queryRelNode, validatedRowType);
- log("Drill Logical", drel);
+ log("Drill Logical", drel, logger);
if (mode == ResultMode.LOGICAL) {
LogicalExplain logicalResult = new LogicalExplain(drel, level, context);
@@ -67,10 +67,10 @@ public class ExplainHandler extends DefaultSqlHandler {
}
Prel prel = convertToPrel(drel);
- log("Drill Physical", prel);
+ log("Drill Physical", prel, logger);
PhysicalOperator pop = convertToPop(prel);
PhysicalPlan plan = convertToPlan(pop);
- log("Drill Plan", plan);
+ log("Drill Plan", plan, logger);
PhysicalExplain physicalResult = new PhysicalExplain(prel, plan, level, context);
return DirectPlan.createDirectPlan(context, physicalResult);
}
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ShowFileHandler.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ShowFileHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ShowFileHandler.java
index c96dc73..3051279 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ShowFileHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ShowFileHandler.java
@@ -40,7 +40,7 @@ import org.apache.calcite.sql.SqlNode;
public class ShowFileHandler extends DefaultSqlHandler {
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SetOptionHandler.class);
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SetOptionHandler.class);
public ShowFileHandler(SqlHandlerConfig config) {
super(config);
@@ -72,7 +72,7 @@ public class ShowFileHandler extends DefaultSqlHandler {
if (drillSchema == null) {
throw UserException.validationError()
.message("Invalid FROM/IN clause [%s]", from.toString())
- .build();
+ .build(logger);
}
}
@@ -83,7 +83,7 @@ public class ShowFileHandler extends DefaultSqlHandler {
throw UserException.validationError()
.message("SHOW FILES is supported in workspace type schema only. Schema [%s] is not a workspace schema.",
SchemaUtilites.getSchemaPath(drillSchema))
- .build();
+ .build(logger);
}
// Get the file system object
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ShowTablesHandler.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ShowTablesHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ShowTablesHandler.java
index 055b761..a007e9f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ShowTablesHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ShowTablesHandler.java
@@ -44,6 +44,7 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
public class ShowTablesHandler extends DefaultSqlHandler {
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ShowTablesHandler.class);
public ShowTablesHandler(SqlHandlerConfig config) { super(config); }
@@ -73,7 +74,7 @@ public class ShowTablesHandler extends DefaultSqlHandler {
// If the default schema is a root schema, throw an error to select a default schema
throw UserException.validationError()
.message("No default schema selected. Select a schema using 'USE schema' command")
- .build();
+ .build(logger);
}
final AbstractSchema drillSchema = SchemaUtilites.unwrapAsDrillSchemaInstance(schema);
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SqlHandlerUtil.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SqlHandlerUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SqlHandlerUtil.java
index 9e7be7f..ca7a510 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SqlHandlerUtil.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/SqlHandlerUtil.java
@@ -52,6 +52,7 @@ import java.util.HashSet;
import java.util.List;
public class SqlHandlerUtil {
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SqlHandlerUtil.class);
/**
* Resolve final RelNode of the new table (or view) for given table field list and new table definition.
@@ -89,7 +90,7 @@ public class SqlHandlerUtil {
final String tblType = isNewTableView ? "view" : "table";
throw UserException.validationError()
.message("%s's field list and the %s's query field list have different counts.", tblType, tblType)
- .build();
+ .build(logger);
}
// CTAS's query field list shouldn't have "*" when table's field list is specified.
@@ -99,7 +100,7 @@ public class SqlHandlerUtil {
throw UserException.validationError()
.message("%s's query field list has a '*', which is invalid when %s's field list is specified.",
tblType, tblType)
- .build();
+ .build(logger);
}
}
@@ -160,7 +161,7 @@ public class SqlHandlerUtil {
if (field == null) {
throw UserException.validationError()
.message("Partition column %s is not in the SELECT list of CTAS!", col)
- .build();
+ .build(logger);
} else {
if (field.getName().startsWith(StarColumnHelper.STAR_COLUMN)) {
colRefStarNames.add(col);
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ViewHandler.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ViewHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ViewHandler.java
index 36287a4..6024fa5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ViewHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/ViewHandler.java
@@ -40,7 +40,7 @@ import org.apache.calcite.rel.RelNode;
import org.apache.calcite.sql.SqlNode;
public abstract class ViewHandler extends DefaultSqlHandler {
- static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ViewHandler.class);
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ViewHandler.class);
protected QueryContext context;
@@ -86,7 +86,7 @@ public abstract class ViewHandler extends DefaultSqlHandler {
throw UserException.validationError()
.message("A non-view table with given name [%s] already exists in schema [%s]",
newViewName, schemaPath)
- .build();
+ .build(logger);
}
if (existingTable.getJdbcTableType() == Schema.TableType.VIEW && !createView.getReplace()) {
@@ -94,7 +94,7 @@ public abstract class ViewHandler extends DefaultSqlHandler {
throw UserException.validationError()
.message("A view with given name [%s] already exists in schema [%s]",
newViewName, schemaPath)
- .build();
+ .build(logger);
}
}
@@ -125,11 +125,11 @@ public abstract class ViewHandler extends DefaultSqlHandler {
if (existingTable != null && existingTable.getJdbcTableType() != Schema.TableType.VIEW) {
throw UserException.validationError()
.message("[%s] is not a VIEW in schema [%s]", viewToDrop, schemaPath)
- .build();
+ .build(logger);
} else if (existingTable == null) {
throw UserException.validationError()
.message("Unknown view [%s] in schema [%s].", viewToDrop, schemaPath)
- .build();
+ .build(logger);
}
drillSchema.dropView(viewToDrop);
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractRecordBatch.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractRecordBatch.java
index ff53052..8731739 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractRecordBatch.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/AbstractRecordBatch.java
@@ -130,7 +130,8 @@ public abstract class AbstractRecordBatch<T extends PhysicalOperator> implements
return IterOutcome.NONE;
case OUT_OF_MEMORY:
// because we don't support schema changes, it is safe to fail the query right away
- context.fail(UserException.memoryError().build());
+ context.fail(UserException.memoryError()
+ .build(logger));
// FALL-THROUGH
case STOP:
return IterOutcome.STOP;
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BasicServer.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BasicServer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BasicServer.java
index 2ebd353..c20afc0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BasicServer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/BasicServer.java
@@ -202,7 +202,7 @@ public abstract class BasicServer<T extends EnumLite, C extends RemoteConnection
.resourceError( e )
.addContext( "Server type", getClass().getSimpleName() )
.message( "Drillbit could not bind to port %s.", port )
- .build();
+ .build(logger);
throw bindException;
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcBus.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
index 9ca09a1..c643ac5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/RpcBus.java
@@ -232,7 +232,7 @@ public abstract class RpcBus<T extends EnumLite, C extends RemoteConnection> imp
ResponseSender sender = new ResponseSenderImpl(connection, msg.coordinationId);
handle(connection, msg.rpcType, msg.pBody, msg.dBody, sender);
} catch (UserRpcException e) {
- UserException uex = UserException.systemError(e).addIdentity(e.getEndpoint()).build();
+ UserException uex = UserException.systemError(e).addIdentity(e.getEndpoint()).build(logger);
logger.error("Unexpected Error while handling request message", e);
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java
index 8443948..41bb413 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/rpc/user/QueryResultHandler.java
@@ -123,7 +123,7 @@ public class QueryResultHandler {
try {
resultsListener.queryCompleted(queryState);
} catch ( Exception e ) {
- resultsListener.submissionFailed(UserException.systemError(e).build());
+ resultsListener.submissionFailed(UserException.systemError(e).build(logger));
}
} else {
logger.warn("queryState {} was ignored", queryState);
@@ -164,7 +164,7 @@ public class QueryResultHandler {
// That releases batch if successful.
} catch ( Exception e ) {
batch.release();
- resultsListener.submissionFailed(UserException.systemError(e).build());
+ resultsListener.submissionFailed(UserException.systemError(e).build(logger));
}
}
@@ -198,7 +198,7 @@ public class QueryResultHandler {
private void failAll() {
for (UserResultsListener l : queryIdToResultsListenersMap.values()) {
- l.submissionFailed(UserException.systemError(new RpcException("Received result without QueryId")).build());
+ l.submissionFailed(UserException.systemError(new RpcException("Received result without QueryId")).build(logger));
}
}
@@ -297,7 +297,7 @@ public class QueryResultHandler {
public void operationComplete(Future<Void> future) throws Exception {
resultsListener.submissionFailed(UserException.connectionError()
.message("Connection %s closed unexpectedly.", connection.getName())
- .build());
+ .build(logger));
}
}
@@ -309,7 +309,7 @@ public class QueryResultHandler {
}
closeFuture.removeListener(closeListener);
- resultsListener.submissionFailed(UserException.systemError(ex).build());
+ resultsListener.submissionFailed(UserException.systemError(ex).build(logger));
}
@@ -362,7 +362,7 @@ public class QueryResultHandler {
closeFuture.removeListener(closeListener);
// Throw an interrupted UserException?
- resultsListener.submissionFailed(UserException.systemError(ex).build());
+ resultsListener.submissionFailed(UserException.systemError(ex).build(logger));
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
index 524fe26..0ba2426 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
@@ -101,7 +101,7 @@ public abstract class AbstractSchema implements Schema, SchemaPartitionExplorer
public boolean createView(View view) throws IOException {
throw UserException.unsupportedError()
.message("Creating new view is not supported in schema [%s]", getSchemaPath())
- .build();
+ .build(logger);
}
/**
@@ -113,7 +113,7 @@ public abstract class AbstractSchema implements Schema, SchemaPartitionExplorer
public void dropView(String viewName) throws IOException {
throw UserException.unsupportedError()
.message("Dropping a view is supported in schema [%s]", getSchemaPath())
- .build();
+ .build(logger);
}
/**
@@ -125,7 +125,7 @@ public abstract class AbstractSchema implements Schema, SchemaPartitionExplorer
public CreateTableEntry createNewTable(String tableName, List<String> partitionColumns) {
throw UserException.unsupportedError()
.message("Creating new tables is not supported in schema [%s]", getSchemaPath())
- .build();
+ .build(logger);
}
/**
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/store/TimedRunnable.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/TimedRunnable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/TimedRunnable.java
index 5a35aff..c562f05 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/TimedRunnable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/TimedRunnable.java
@@ -150,7 +150,7 @@ public abstract class TimedRunnable<V> implements Runnable {
logger.error(errMsg);
throw UserException.resourceError()
.message(errMsg)
- .build();
+ .build(logger);
}
} finally {
if (!threadPool.isShutdown()) {
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
index 8e0432a..a7e83f6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
@@ -175,7 +175,7 @@ public class WorkspaceSchemaFactory {
throw UserException
.permissionError(e)
.message("Not authorized to list view tables in schema [%s]", getFullSchemaName())
- .build();
+ .build(logger);
}
} catch (Exception e) {
logger.warn("Failure while trying to list .view.drill files in workspace [{}]", getFullSchemaName(), e);
@@ -209,10 +209,9 @@ public class WorkspaceSchemaFactory {
} catch(AccessControlException e) {
if (!schemaConfig.getIgnoreAuthErrors()) {
logger.debug(e.getMessage());
- throw UserException
- .permissionError(e)
- .message("Not authorized to list or query tables in schema [%s]", getFullSchemaName())
- .build();
+ throw UserException.permissionError(e)
+ .message("Not authorized to list or query tables in schema [%s]", getFullSchemaName())
+ .build(logger);
}
} catch(IOException e) {
logger.warn("Failure while trying to list view tables in workspace [{}]", name, getFullSchemaName(), e);
@@ -226,10 +225,9 @@ public class WorkspaceSchemaFactory {
} catch (AccessControlException e) {
if (!schemaConfig.getIgnoreAuthErrors()) {
logger.debug(e.getMessage());
- throw UserException
- .permissionError(e)
- .message("Not authorized to read view [%s] in schema [%s]", name, getFullSchemaName())
- .build();
+ throw UserException.permissionError(e)
+ .message("Not authorized to read view [%s] in schema [%s]", name, getFullSchemaName())
+ .build(logger);
}
} catch (IOException e) {
logger.warn("Failure while trying to load {}.view.drill file in workspace [{}]", name, getFullSchemaName(), e);
@@ -312,10 +310,9 @@ public class WorkspaceSchemaFactory {
} catch (AccessControlException e) {
if (!schemaConfig.getIgnoreAuthErrors()) {
logger.debug(e.getMessage());
- throw UserException
- .permissionError(e)
- .message("Not authorized to read table [%s] in schema [%s]", key, getFullSchemaName())
- .build();
+ throw UserException.permissionError(e)
+ .message("Not authorized to read table [%s] in schema [%s]", key, getFullSchemaName())
+ .build(logger);
}
} catch (IOException e) {
logger.debug("Failed to create DrillTable with root {} and name {}", config.getLocation(), key, e);
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
index 0df6227..dfc4f3a 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/json/JSONRecordReader.java
@@ -158,7 +158,7 @@ public class JSONRecordReader extends AbstractRecordReader {
exceptionBuilder.pushContext("Record ", currentRecordNumberInFile())
.pushContext("File ", hadoopPath.toUri().getPath());
- throw exceptionBuilder.build();
+ throw exceptionBuilder.build(logger);
}
private long currentRecordNumberInFile() {
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextReader.java
index fec0ab4..3899509 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/compliant/TextReader.java
@@ -429,7 +429,7 @@ final class TextReader {
.message(
"Drill failed to read your text file. Drill supports up to %d columns in a text file. Your file appears to have more than that.",
RepeatedVarCharOutput.MAXIMUM_NUMBER_COLUMNS)
- .build();
+ .build(logger);
}
String message = null;
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetReaderUtility.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetReaderUtility.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetReaderUtility.java
index da480d7..2f56aa0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetReaderUtility.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetReaderUtility.java
@@ -26,12 +26,13 @@ import org.apache.drill.exec.work.ExecErrorConstants;
* Utility class where we can capture common logic between the two parquet readers
*/
public class ParquetReaderUtility {
+ private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ParquetReaderUtility.class);
+
public static void checkDecimalTypeEnabled(OptionManager options) {
if (options.getOption(PlannerSettings.ENABLE_DECIMAL_DATA_TYPE_KEY).bool_val == false) {
- throw UserException
- .unsupportedError()
- .message(ExecErrorConstants.DECIMAL_DISABLE_ERR_MSG)
- .build();
+ throw UserException.unsupportedError()
+ .message(ExecErrorConstants.DECIMAL_DISABLE_ERR_MSG)
+ .build(logger);
}
}
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java
index 260ebde..5c03c02 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/complex/fn/JsonReader.java
@@ -157,7 +157,7 @@ public class JsonReader extends BaseJsonProcessor {
getExceptionWithContext(
UserException.dataReadError(), currentFieldName, null)
.message("Failure while reading JSON. (Got an invalid read state %s )", readState.toString())
- .build();
+ .build(logger);
}
return readState;
@@ -172,7 +172,7 @@ public class JsonReader extends BaseJsonProcessor {
.message("Drill attempted to unwrap a toplevel list "
+ "in your document. However, it appears that there is trailing content after this top level list. Drill only "
+ "supports querying a set of distinct maps or a single json array with multiple inner maps.")
- .build();
+ .build(logger);
}
}
@@ -188,7 +188,7 @@ public class JsonReader extends BaseJsonProcessor {
UserException.dataReadError(), currentFieldName, null)
.message("The top level of your document must either be a single array of maps or a set "
+ "of white space delimited maps.")
- .build();
+ .build(logger);
}
if(skipOuterList){
@@ -202,7 +202,7 @@ public class JsonReader extends BaseJsonProcessor {
UserException.dataReadError(), currentFieldName, null)
.message("The top level of your document must either be a single array of maps or a set "
+ "of white space delimited maps.")
- .build();
+ .build(logger);
}
}else{
@@ -219,7 +219,7 @@ public class JsonReader extends BaseJsonProcessor {
getExceptionWithContext(
UserException.dataReadError(), currentFieldName, null)
.message("Failure while parsing JSON. Ran across unexpected %s.", JsonToken.END_ARRAY)
- .build();
+ .build(logger);
}
case NOT_AVAILABLE:
@@ -230,7 +230,7 @@ public class JsonReader extends BaseJsonProcessor {
UserException.dataReadError(), currentFieldName, null)
.message("Failure while parsing JSON. Found token of [%s]. Drill currently only supports parsing "
+ "json strings that contain either lists or maps. The root object cannot be a scalar.", t)
- .build();
+ .build(logger);
}
return ReadState.WRITE_SUCCEED;
@@ -350,7 +350,7 @@ public class JsonReader extends BaseJsonProcessor {
getExceptionWithContext(
UserException.dataReadError(), currentFieldName, null)
.message("Unexpected token %s", parser.getCurrentToken())
- .build();
+ .build(logger);
}
}
@@ -417,7 +417,7 @@ public class JsonReader extends BaseJsonProcessor {
getExceptionWithContext(
UserException.dataReadError(), currentFieldName, null)
.message("Unexpected token %s", parser.getCurrentToken())
- .build();
+ .build(logger);
}
}
map.end();
@@ -499,7 +499,7 @@ public class JsonReader extends BaseJsonProcessor {
.message("Null values are not supported in lists by default. " +
"Please set `store.json.all_text_mode` to true to read lists containing nulls. " +
"Be advised that this will treat JSON null values as a string containing the word 'null'.")
- .build();
+ .build(logger);
case VALUE_NUMBER_FLOAT:
list.float8().writeFloat8(parser.getDoubleValue());
atLeastOneWrite = true;
@@ -520,10 +520,10 @@ public class JsonReader extends BaseJsonProcessor {
default:
throw UserException.dataReadError()
.message("Unexpected token %s", parser.getCurrentToken())
- .build();
+ .build(logger);
}
} catch (Exception e) {
- throw getExceptionWithContext(e, this.currentFieldName, null).build();
+ throw getExceptionWithContext(e, this.currentFieldName, null).build(logger);
}
}
list.end();
@@ -562,7 +562,7 @@ public class JsonReader extends BaseJsonProcessor {
getExceptionWithContext(
UserException.dataReadError(), currentFieldName, null)
.message("Unexpected token %s", parser.getCurrentToken())
- .build();
+ .build(logger);
}
}
list.end();
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
index 78c438b..b1e5df5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/foreman/Foreman.java
@@ -246,7 +246,7 @@ public class Foreman implements Runnable {
}
injector.injectChecked(queryContext.getExecutionControls(), "run-try-end", ForemanException.class);
} catch (final OutOfMemoryException | OutOfMemoryRuntimeException e) {
- moveToState(QueryState.FAILED, UserException.memoryError(e).build());
+ moveToState(QueryState.FAILED, UserException.memoryError(e).build(logger));
} catch (final ForemanException e) {
moveToState(QueryState.FAILED, e);
} catch (AssertionError | Exception ex) {
@@ -257,7 +257,7 @@ public class Foreman implements Runnable {
moveToState(QueryState.FAILED,
UserException.resourceError(e)
.message("One or more nodes ran out of memory while executing the query.")
- .build());
+ .build(logger));
} else {
/*
* FragmentExecutors use a DrillbitStatusListener to watch out for the death of their query's Foreman. So, if we
@@ -496,7 +496,7 @@ public class Foreman implements Runnable {
.message(
"Unable to acquire queue resources for query within timeout. Timeout for %s queue was set at %d seconds.",
queueName, queueTimeout / 1000)
- .build();
+ .build(logger);
}
}
@@ -735,7 +735,7 @@ public class Foreman implements Runnable {
final UserException uex;
if (resultException != null) {
final boolean verbose = queryContext.getOptions().getOption(ExecConstants.ENABLE_VERBOSE_ERRORS_KEY).bool_val;
- uex = UserException.systemError(resultException).addIdentity(queryContext.getCurrentEndpoint()).build();
+ uex = UserException.systemError(resultException).addIdentity(queryContext.getCurrentEndpoint()).build(logger);
resultBuilder.addError(uex.getOrCreatePBError(verbose));
} else {
uex = null;
@@ -1006,7 +1006,7 @@ public class Foreman implements Runnable {
"Exceeded timeout (%d) while waiting send intermediate work fragments to remote nodes. " +
"Sent %d and only heard response back from %d nodes.",
timeout, numIntFragments, numIntFragments - numberRemaining)
- .build();
+ .build(logger);
}
// if any of the intermediate fragment submissions failed, fail the query
@@ -1030,7 +1030,7 @@ public class Foreman implements Runnable {
throw UserException.connectionError(submissionExceptions.get(0).rpcException)
.message("Error setting up remote intermediate fragment execution")
.addContext("Nodes with failures", sb.toString())
- .build();
+ .build(logger);
}
injector.injectChecked(queryContext.getExecutionControls(), "send-fragments", ForemanException.class);
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/FragmentExecutor.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/FragmentExecutor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/FragmentExecutor.java
index a9c2b6d..a6bd692 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/FragmentExecutor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/work/fragment/FragmentExecutor.java
@@ -267,7 +267,7 @@ public class FragmentExecutor implements Runnable {
} catch (OutOfMemoryError | OutOfMemoryRuntimeException e) {
if (!(e instanceof OutOfMemoryError) || "Direct buffer memory".equals(e.getMessage())) {
- fail(UserException.memoryError(e).build());
+ fail(UserException.memoryError(e).build(logger));
} else {
// we have a heap out of memory error. The JVM is unstable; exit.
System.err.println("Node ran out of Heap memory, exiting.");
@@ -325,7 +325,7 @@ public class FragmentExecutor implements Runnable {
final UserException uex = UserException.systemError(deferredException.getAndClear())
.addIdentity(getContext().getIdentity())
.addContext("Fragment", handle.getMajorFragmentId() + ":" + handle.getMinorFragmentId())
- .build();
+ .build(logger);
listener.fail(fragmentContext.getHandle(), uex);
} else {
listener.stateChanged(fragmentContext.getHandle(), outcome);
http://git-wip-us.apache.org/repos/asf/drill/blob/8815eb7d/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetResultListener.java
----------------------------------------------------------------------
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetResultListener.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetResultListener.java
index df74f7a..9f622f5 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetResultListener.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/ParquetResultListener.java
@@ -174,7 +174,7 @@ public class ParquetResultListener implements UserResultsListener {
}
assertEquals("Record count incorrect for column: " + s, totalRecords, (long) valuesChecked.get(s));
} catch (AssertionError e) {
- submissionFailed(UserException.systemError(e).build());
+ submissionFailed(UserException.systemError(e).build(logger));
}
}
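For reference, a minimal sketch of the builder usage after this change, following the pattern visible in the hunks above. The ExampleHandler class, its method, and the message text are hypothetical and exist only for illustration; the private static final logger field and the build(Logger) call mirror the diff.

import org.apache.drill.common.exceptions.UserException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class ExampleHandler {
  // Logger is private static final, matching the visibility used in the changed classes above.
  private static final Logger logger = LoggerFactory.getLogger(ExampleHandler.class);

  public void validateNewView(String viewName, String schemaPath, boolean alreadyExists) {
    if (alreadyExists) {
      // build(Logger) constructs the UserException and logs it through the supplied logger,
      // so the log entry is attributed to this class rather than to UserException itself.
      throw UserException.validationError()
          .message("A view with given name [%s] already exists in schema [%s]", viewName, schemaPath)
          .build(logger);
    }
  }
}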