hive-issues mailing list archives

From "ASF GitHub Bot (Jira)" <j...@apache.org>
Subject [jira] [Work logged] (HIVE-23951) Support parameterized queries in WHERE/HAVING clause
Date Mon, 03 Aug 2020 19:35:01 GMT

     [ https://issues.apache.org/jira/browse/HIVE-23951?focusedWorklogId=465876&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-465876 ]

ASF GitHub Bot logged work on HIVE-23951:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 03/Aug/20 19:34
            Start Date: 03/Aug/20 19:34
    Worklog Time Spent: 10m 
      Work Description: jcamachor commented on a change in pull request #1315:
URL: https://github.com/apache/hive/pull/1315#discussion_r464192442



##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/ExecuteStatementAnalyzer.java
##########
@@ -0,0 +1,377 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.drop;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.ExplainTask;
+import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.OperatorUtils;
+import org.apache.hadoop.hive.ql.exec.SelectOperator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.tez.TezTask;
+import org.apache.hadoop.hive.ql.exec.vector.VectorSelectOperator;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.type.ExprNodeDescExprFactory;
+import org.apache.hadoop.hive.ql.plan.BaseWork;
+import org.apache.hadoop.hive.ql.plan.ExprDynamicParamDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Analyzer for Execute statement.
+ * This analyzer
+ *  retrieves the cached {@link BaseSemanticAnalyzer},
+ *  makes a copy of all tasks by serializing/deserializing them,
+ *  binds dynamic parameters inside the cached {@link BaseSemanticAnalyzer} using the provided values
+ */
+@DDLType(types = HiveParser.TOK_EXECUTE)
+public class ExecuteStatementAnalyzer extends BaseSemanticAnalyzer {
+
+  public ExecuteStatementAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  /**
+   * This class encapsulates all {@link Task}s required to be copied.
+   * This is required because the {@link FetchTask} and the list of {@link Task}s may hold
+   * references to the same objects (e.g. the list of result files) and therefore must be
+   * serialized/deserialized together.
+   */
+  private class PlanCopy {
+    FetchTask fetchTask;
+    List<Task<?>> tasks;
+
+    PlanCopy(FetchTask fetchTask, List<Task<?>> tasks) {
+      this.fetchTask = fetchTask;
+      this.tasks = tasks;
+    }
+
+    FetchTask getFetchTask() {
+      return fetchTask;
+    }
+
+    List<Task<?>> getTasks()  {
+      return tasks;
+    }
+  }
+
+  private String getQueryName(ASTNode root) {
+    ASTNode queryNameAST = (ASTNode)(root.getChild(1));
+    return queryNameAST.getText();
+  }
+
+  /**
+   * Utility method to create a copy of the provided object using Kryo serialization/deserialization.
+   */
+  private <T> T makeCopy(final Object task, Class<T> objClass) {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    SerializationUtilities.serializePlan(task, baos);
+
+    return SerializationUtilities.deserializePlan(
+        new ByteArrayInputStream(baos.toByteArray()), objClass);
+  }
+
+  /**
+   * Given a cached {@link BaseSemanticAnalyzer}, this method makes copies of all tasks
+   * (including {@link FetchTask}) and updates the existing {@link ExecuteStatementAnalyzer}.
+   */
+  private void createTaskCopy(final BaseSemanticAnalyzer cachedPlan) {
+    PlanCopy planCopy = new PlanCopy(cachedPlan.getFetchTask(), cachedPlan.getAllRootTasks());
+    planCopy = makeCopy(planCopy, planCopy.getClass());
+    this.setFetchTask(planCopy.getFetchTask());
+    this.rootTasks = planCopy.getTasks();
+  }
+
+  private String getParamLiteralValue(Map<Integer, ASTNode> paramMap, int paramIndex) {
+    assert(paramMap.containsKey(paramIndex));
+    ASTNode node = paramMap.get(paramIndex);
+
+    if (node.getType() == HiveParser.StringLiteral) {
+      // remove quotes
+      return BaseSemanticAnalyzer.unescapeSQLString(node.getText());
+
+    } else {
+      return node.getText();
+    }
+  }
+
+  /**
+   * This method creates a constant expression to replace the given dynamic expression.
+   * @param dynamicExpr Expression node representing the dynamic parameter
+   * @param typeInfo Type info used to create constant expression from ASTNode
+   * @param parameterMap Integer to AST node map
+   */
+  private ExprNodeConstantDesc getConstant(ExprDynamicParamDesc dynamicExpr, TypeInfo typeInfo,

Review comment:
       This method should probably rely on `ExprNodeTypeCheck.genExprNode` to avoid having two different parsing / interpretation code paths.
   
   A possible idea is to do something like it is done for `default` constraint values. You could rely on same logic to generate the expr node:
   ```
       TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
       ExprNodeDesc defaultValExpr = ExprNodeTypeCheck.genExprNode(node, typeCheckCtx).get(node);
   ```
    Then verify that the type matches. I think that will provide more reliable logic. What do you think?
   
    My take is that it's better to be too strict about types than to generate wrong / different results.

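For concreteness, a minimal sketch of the suggested flow (here `node` is the parameter's literal `ASTNode` and `expectedTypeInfo` is the type inferred for the dynamic parameter; both names and the exact mismatch handling are illustrative, not part of this PR):

```java
// Sketch only: reuse the same expression generation as default constraint
// values, then be strict about the resulting type.
TypeCheckCtx typeCheckCtx = new TypeCheckCtx(null);
ExprNodeDesc constExpr = ExprNodeTypeCheck.genExprNode(node, typeCheckCtx).get(node);
if (!(constExpr instanceof ExprNodeConstantDesc)
    || !constExpr.getTypeInfo().equals(expectedTypeInfo)) {
  // Failing fast keeps the strictness argued for above: better an error
  // than wrong / different results.
  throw new SemanticException("Parameter does not evaluate to a constant of type "
      + expectedTypeInfo.getTypeName());
}
return (ExprNodeConstantDesc) constExpr;
```
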
##########
File path: parser/src/java/org/apache/hadoop/hive/ql/parse/PrepareStatementParser.g
##########
@@ -0,0 +1,66 @@
+/**
+   Licensed to the Apache Software Foundation (ASF) under one or more 
+   contributor license agreements.  See the NOTICE file distributed with 
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with 
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+*/
+parser grammar PrepareStatementParser;
+
+options
+{
+output=AST;
+ASTLabelType=ASTNode;
+backtrack=false;
+k=3;
+}
+
+@members {
+  @Override
+  public Object recoverFromMismatchedSet(IntStream input,
+      RecognitionException re, BitSet follow) throws RecognitionException {
+    throw re;
+  }
+  @Override
+  public void displayRecognitionError(String[] tokenNames,
+      RecognitionException e) {
+    gParent.errors.add(new ParseError(gParent, e, tokenNames));
+  }
+}
+
+@rulecatch {
+catch (RecognitionException e) {
+  throw e;
+}
+}
+
+//----------------------- Rules for parsing Prepare statement-----------------------------
+prepareStatement
+@init { gParent.pushMsg("prepare statement ", state); }
+@after { gParent.popMsg(state); }
+    : KW_PREPARE identifier KW_FROM queryStatementExpression
+    -> ^(TOK_PREPARE queryStatementExpression identifier)
+    ;
+
+executeStatement
+@init { gParent.pushMsg("execute statement ", state); }
+@after { gParent.popMsg(state); }
+    : KW_EXECUTE identifier KW_USING executeParamList
+    -> ^(TOK_EXECUTE executeParamList identifier)
+    ;
+
+executeParamList
+@init { gParent.pushMsg("execute param list", state); }
+@after { gParent.popMsg(state); }
+    : constant (COMMA constant)*

Review comment:
       This is what we were talking about offline, i.e. allowing expressions here. Just leaving a note so we do not forget to create the follow-up JIRA.

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/ExecuteStatementAnalyzer.java
##########
@@ -0,0 +1,377 @@
[... diff context identical to the first ExecuteStatementAnalyzer.java excerpt above ...]
+  /**
+   * This method creates a constant expression to replace the given dynamic expression.
+   * @param dynamicExpr Expression node representing the dynamic parameter
+   * @param typeInfo Type info used to create constant expression from ASTNode
+   * @param parameterMap Integer to AST node map
+   */
+  private ExprNodeConstantDesc getConstant(ExprDynamicParamDesc dynamicExpr, TypeInfo typeInfo,
+      Map<Integer, ASTNode> parameterMap) {
+    assert(parameterMap.containsKey(dynamicExpr.getIndex()));
+
+    String value = getParamLiteralValue(parameterMap, dynamicExpr.getIndex());
+
+    ExprNodeDescExprFactory factory = new ExprNodeDescExprFactory();
+
+    if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
+      return factory.createBooleanConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
+      return factory.createIntConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
+      return factory.createBigintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.charTypeInfo)
+        // CHAR and VARCHAR typeinfo could differ due to different length, therefore an extra

Review comment:
       You can use `type.getPrimitiveCategory` so you do not have to care about this. It will also let you use a `switch` statement instead of the `if...else` chain.

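A hedged sketch of that shape, reusing the `factory` and `value` locals already in this method and assuming imports for `PrimitiveTypeInfo` and `PrimitiveObjectInspector.PrimitiveCategory` (the string-family and `default` branches are illustrative choices, not part of the PR):

```java
// Sketch only: dispatch on the primitive category so CHAR(n)/VARCHAR(n)
// length differences no longer need special-casing when matching the type.
PrimitiveCategory category = ((PrimitiveTypeInfo) typeInfo).getPrimitiveCategory();
switch (category) {
  case BOOLEAN:
    return factory.createBooleanConstantExpr(value);
  case INT:
    return factory.createIntConstantExpr(value);
  case LONG:
    return factory.createBigintConstantExpr(value);
  case STRING:
  case CHAR:
  case VARCHAR:
    // Illustrative: treating the string family uniformly here; dedicated
    // char/varchar constants may be preferable in the real patch.
    return factory.createStringConstantExpr(value);
  default:
    throw new SemanticException("Unsupported dynamic parameter type: "
        + typeInfo.getTypeName());
}
```
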
##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/ExecuteStatementAnalyzer.java
##########
@@ -0,0 +1,377 @@
[... diff context identical to the first ExecuteStatementAnalyzer.java excerpt above ...]
+  /**
+   * Given a cached {@link BaseSemanticAnalyzer}, this method makes copies of all tasks
+   * (including {@link FetchTask}) and updates the existing {@link ExecuteStatementAnalyzer}.
+   */
+  private void createTaskCopy(final BaseSemanticAnalyzer cachedPlan) {

Review comment:
       I think it would be better to create a `CachedPlan` class rather than caching the complete `SemanticAnalyzer`.
   
   In addition, doesn't `QueryPlan` have all the information you need? If it has most of it, couldn't you extend it?

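As a strawman, such a cache entry could be as small as the following (class and accessor names are illustrative; it assumes the `FetchTask`/`Task` imports already used in this file):

```java
// Sketch only: cache just the pieces EXECUTE needs, instead of retaining
// the whole SemanticAnalyzer between PREPARE and EXECUTE.
public class CachedPlan {
  private final FetchTask fetchTask;
  private final List<Task<?>> rootTasks;

  public CachedPlan(FetchTask fetchTask, List<Task<?>> rootTasks) {
    this.fetchTask = fetchTask;
    this.rootTasks = rootTasks;
  }

  public FetchTask getFetchTask() {
    return fetchTask;
  }

  public List<Task<?>> getRootTasks() {
    return rootTasks;
  }
}
```
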
##########
File path: ql/src/java/org/apache/hadoop/hive/ql/plan/ExprDynamicParamDesc.java
##########
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.hadoop.hive.common.StringInternUtils;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+/**
+ * A constant expression.

Review comment:
       nit. outdated comment?

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/ExecuteStatementAnalyzer.java
##########
@@ -0,0 +1,381 @@
+/*

Review comment:
       This analyzer should probably not be in the `table/drop` folder?

##########
File path: ql/src/test/results/clientpositive/llap/prepare_plan.q.out
##########
@@ -0,0 +1,1575 @@
+PREHOOK: query: explain extended prepare pcount from select count(*) from src where key > ?
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: explain extended prepare pcount from select count(*) from src where key > ?
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+OPTIMIZED SQL: SELECT COUNT(*) AS `$f0`
+FROM `default`.`src`
+WHERE `key` > CAST(? AS STRING)
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  filterExpr: (key > CAST( Dynamic Parameter  index: 1 AS STRING)) (type: boolean)
+                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  GatherStats: false
+                  Filter Operator
+                    isSamplingPred: false
+                    predicate: (key > CAST( Dynamic Parameter  index: 1 AS STRING)) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.99
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          bucketingVersion: 2
+                          null sort order: 
+                          numBuckets: -1
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          tag: -1
+                          value expressions: _col0 (type: bigint)
+                          auto parallelism: false
+            Execution mode: llap
+            LLAP IO: no inputs
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: src
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    bucket_count -1
+                    bucketing_version 2
+                    column.name.delimiter ,
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.src
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucketing_version 2
+                      column.name.delimiter ,
+                      columns key,value
+                      columns.comments 'default','default'
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.src
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.src
+                  name: default.src
+            Truncated Path -> Alias:
+              /src [src]
+        Reducer 2 
+            Execution mode: llap
+            Needs Tagging: false
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  bucketingVersion: 2
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      properties:
+                        bucketing_version -1
+                        columns _col0
+                        columns.types bigint
+                        escape.delim \
+                        hive.serialization.extend.additional.nesting.levels true
+                        serialization.escape.crlf true
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pcount from select count(*) from src where key > ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pcount from select count(*) from src where key > ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+378
+PREHOOK: query: explain extended prepare p1 from select * from src where key > ? order by key limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: explain extended prepare p1 from select * from src where key > ? order by key limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+OPTIMIZED SQL: SELECT `key`, `value`
+FROM `default`.`src`
+WHERE `key` > CAST(? AS STRING)
+ORDER BY `key`
+LIMIT 10
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  filterExpr: (key > CAST( Dynamic Parameter  index: 1 AS STRING)) (type: boolean)
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  GatherStats: false
+                  Filter Operator
+                    isSamplingPred: false
+                    predicate: (key > CAST( Dynamic Parameter  index: 1 AS STRING)) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                    Top N Key Operator
+                      sort order: +
+                      keys: key (type: string)
+                      null sort order: z
+                      Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                      top n: 10
+                      Select Operator
+                        expressions: key (type: string), value (type: string)
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          bucketingVersion: 2
+                          key expressions: _col0 (type: string)
+                          null sort order: z
+                          numBuckets: -1
+                          sort order: +
+                          Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                          tag: -1
+                          value expressions: _col1 (type: string)
+                          auto parallelism: false
+            Execution mode: llap
+            LLAP IO: no inputs
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: src
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    bucket_count -1
+                    bucketing_version 2
+                    column.name.delimiter ,
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.src
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucketing_version 2
+                      column.name.delimiter ,
+                      columns key,value
+                      columns.comments 'default','default'
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.src
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.src
+                  name: default.src
+            Truncated Path -> Alias:
+              /src [src]
+        Reducer 2 
+            Execution mode: llap
+            Needs Tagging: false
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    bucketingVersion: 2
+                    compressed: false
+                    GlobalTableId: 0
+#### A masked pattern was here ####
+                    NumFilesPerFileSink: 1
+                    Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+#### A masked pattern was here ####
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        properties:
+                          bucketing_version -1
+                          columns _col0,_col1
+                          columns.types string:string
+                          escape.delim \
+                          hive.serialization.extend.additional.nesting.levels true
+                          serialization.escape.crlf true
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    TotalFiles: 1
+                    GatherStats: false
+                    MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare p1 from select * from src where key > ? order by key limit 10
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: prepare p1 from select * from src where key > ? order by key limit 10
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+201	val_201
+202	val_202
+203	val_203
+203	val_203
+205	val_205
+205	val_205
+207	val_207
+207	val_207
+208	val_208
+208	val_208
+PREHOOK: query: prepare p1 from select * from src where key > ? order by key limit 10
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: prepare p1 from select * from src where key > ? order by key limit 10
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+10	val_10
+100	val_100
+100	val_100
+103	val_103
+103	val_103
+104	val_104
+104	val_104
+105	val_105
+11	val_11
+111	val_111
+PREHOOK: query: explain
+    prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain
+    prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: ((cint <= Dynamic Parameter  index: 1) and (cbigint <= Dynamic Parameter  index: 2) and (cfloat <> Dynamic Parameter  index: 3)) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 183480 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((cint <= Dynamic Parameter  index: 1) and (cbigint <= Dynamic Parameter  index: 2) and (cfloat <> Dynamic Parameter  index: 3)) (type: boolean)
+                    Statistics: Num rows: 1365 Data size: 20400 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: ctinyint (type: tinyint)
+                      outputColumnNames: ctinyint
+                      Statistics: Num rows: 1365 Data size: 20400 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: avg(ctinyint)
+                        keys: ctinyint (type: tinyint)
+                        minReductionHashAggr: 0.9040293
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 131 Data size: 10352 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: tinyint)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: tinyint)
+                          Statistics: Num rows: 131 Data size: 10352 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: struct<count:bigint,sum:double,input:tinyint>)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: avg(VALUE._col0)
+                keys: KEY._col0 (type: tinyint)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 131 Data size: 1444 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: double)
+                  outputColumnNames: _col1
+                  Statistics: Num rows: 131 Data size: 1048 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (_col1 < Dynamic Parameter  index: 4) (type: boolean)
+                    Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col1 (type: double)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: ((cint <= 100) and (cbigint <= 5000000L) and (cfloat <> 0.023)) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 183480 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((cint <= 100) and (cbigint <= 5000000L) and (cfloat <> 0.023)) (type: boolean)
+                    Statistics: Num rows: 1365 Data size: 20400 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: ctinyint (type: tinyint)
+                      outputColumnNames: ctinyint
+                      Statistics: Num rows: 1365 Data size: 20400 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: avg(ctinyint)
+                        keys: ctinyint (type: tinyint)
+                        minReductionHashAggr: 0.9040293
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 131 Data size: 10352 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: tinyint)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: tinyint)
+                          Statistics: Num rows: 131 Data size: 10352 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: struct<count:bigint,sum:double,input:tinyint>)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: avg(VALUE._col0)
+                keys: KEY._col0 (type: tinyint)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 131 Data size: 1444 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: double)
+                  outputColumnNames: _col1
+                  Statistics: Num rows: 131 Data size: 1048 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (_col1 < 0.0D) (type: boolean)
+                    Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col1 (type: double)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+-51.0
+PREHOOK: query: explain
+    prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint = ?  and csmallint != ? group by cint
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain
+    prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint = ?  and csmallint != ? group by cint
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: ((ctinyint = UDFToByte(Dynamic Parameter  index: 1)) and (csmallint <> UDFToShort(Dynamic Parameter  index: 2))) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 110092 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((ctinyint = UDFToByte(Dynamic Parameter  index: 1)) and (csmallint <> UDFToShort(Dynamic Parameter  index: 2))) (type: boolean)
+                    Statistics: Num rows: 6144 Data size: 55048 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: cint (type: int)
+                      outputColumnNames: cint
+                      Statistics: Num rows: 6144 Data size: 55048 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        keys: cint (type: int)
+                        minReductionHashAggr: 0.49983722
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 3072 Data size: 33752 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 3072 Data size: 33752 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 3072 Data size: 33752 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: bigint)
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 3072 Data size: 24576 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 3072 Data size: 24576 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint <= ?  and csmallint != ? group by cint
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint <= ?  and csmallint != ? group by cint
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: ((ctinyint <= UDFToByte(3Y)) and (csmallint <> UDFToShort(10S))) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 110092 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((ctinyint <= UDFToByte(3Y)) and (csmallint <> UDFToShort(10S))) (type: boolean)
+                    Statistics: Num rows: 4096 Data size: 36708 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: cint (type: int)
+                      outputColumnNames: cint
+                      Statistics: Num rows: 4096 Data size: 36708 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        keys: cint (type: int)
+                        minReductionHashAggr: 0.49975586
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 2048 Data size: 22504 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 2048 Data size: 22504 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 2048 Data size: 22504 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: bigint)
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 2048 Data size: 16384 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 2048 Data size: 16384 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint <= ?  and csmallint != ? group by cint
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint <= ?  and csmallint != ? group by cint
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+553
+547
+523
+1665
+PREHOOK: query: create table tcharvchar(c char(10), v varchar(50)) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tcharvchar
+POSTHOOK: query: create table tcharvchar(c char(10), v varchar(50)) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tcharvchar
+PREHOOK: query: insert into tcharvchar values ('c1', 'v10'), ('c2', 'v100')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tcharvchar
+POSTHOOK: query: insert into tcharvchar values ('c1', 'v10'), ('c2', 'v100')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tcharvchar
+POSTHOOK: Lineage: tcharvchar.c SCRIPT []
+POSTHOOK: Lineage: tcharvchar.v SCRIPT []
+PREHOOK: query: explain prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+POSTHOOK: query: explain prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: tcharvchar
+                  filterExpr: ((c = CAST( Dynamic Parameter  index: 1 AS CHAR(10))) and (v <> CAST( Dynamic Parameter  index: 2 AS varchar(50)))) (type: boolean)
+                  Statistics: Num rows: 2 Data size: 348 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((c = CAST( Dynamic Parameter  index: 1 AS CHAR(10))) and (v <> CAST( Dynamic Parameter  index: 2 AS varchar(50)))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: tcharvchar
+                  filterExpr: ((c = CAST( 'c1' AS CHAR(10))) and (v <> CAST( 'v1' AS varchar(50)))) (type: boolean)
+                  Statistics: Num rows: 2 Data size: 348 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((c = CAST( 'c1' AS CHAR(10))) and (v <> CAST( 'v1' AS varchar(50)))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+1
+PREHOOK: query: drop table tcharvchar
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tcharvchar
+PREHOOK: Output: default@tcharvchar
+POSTHOOK: query: drop table tcharvchar
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tcharvchar
+POSTHOOK: Output: default@tcharvchar
+PREHOOK: query: create table tdatets(t timestamp, d date, dc decimal(10,2)) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tdatets
+POSTHOOK: query: create table tdatets(t timestamp, d date, dc decimal(10,2)) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tdatets
+PREHOOK: query: insert into tdatets values ( cast('2011-01-01 00:00:00' as timestamp), cast('1919-11-01' as date), 5.00)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tdatets
+POSTHOOK: query: insert into tdatets values ( cast('2011-01-01 00:00:00' as timestamp), cast('1919-11-01' as date), 5.00)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tdatets
+POSTHOOK: Lineage: tdatets.d SCRIPT []
+POSTHOOK: Lineage: tdatets.dc SCRIPT []
+POSTHOOK: Lineage: tdatets.t SCRIPT []
+PREHOOK: query: insert into tdatets values ( cast('2010-01-01 04:00:00' as timestamp), cast('1918-11-01' as date), 4.00)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tdatets
+POSTHOOK: query: insert into tdatets values ( cast('2010-01-01 04:00:00' as timestamp), cast('1918-11-01' as date), 4.00)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tdatets
+POSTHOOK: Lineage: tdatets.d SCRIPT []
+POSTHOOK: Lineage: tdatets.dc SCRIPT []
+POSTHOOK: Lineage: tdatets.t SCRIPT []
+PREHOOK: query: explain
+    prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+POSTHOOK: query: explain
+    prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: tdatets
+                  filterExpr: ((t <> CAST( Dynamic Parameter  index: 1 AS TIMESTAMP)) and (d <> CAST( Dynamic Parameter  index: 2 AS DATE)) and (dc > CAST( Dynamic Parameter  index: 3 AS decimal(10,2)))) (type: boolean)
+                  Statistics: Num rows: 2 Data size: 416 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((t <> CAST( Dynamic Parameter  index: 1 AS TIMESTAMP)) and (d <> CAST( Dynamic Parameter  index: 2 AS DATE)) and (dc > CAST( Dynamic Parameter  index: 3 AS decimal(10,2)))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+POSTHOOK: query: prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: tdatets
+                  filterExpr: ((t <> CAST( TIMESTAMP'2012-01-01 00:01:01' AS TIMESTAMP)) and (d <> CAST( DATE'2020-01-01' AS DATE)) and (dc > CAST( 1 AS decimal(10,2)))) (type: boolean)
+                  Statistics: Num rows: 2 Data size: 416 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((t <> CAST( TIMESTAMP'2012-01-01 00:01:01' AS TIMESTAMP)) and (d <> CAST( DATE'2020-01-01' AS DATE)) and (dc > CAST( 1 AS decimal(10,2)))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+POSTHOOK: query: prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+2
+PREHOOK: query: drop table tdatets
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tdatets
+PREHOOK: Output: default@tdatets
+POSTHOOK: query: drop table tdatets
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tdatets
+POSTHOOK: Output: default@tdatets
+PREHOOK: query: explain prepare p2 from select min(ctinyint), max(cbigint) from alltypesorc where cint > (? + ? + ?) group by ctinyint
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain prepare p2 from select min(ctinyint), max(cbigint) from alltypesorc where cint > (? + ? + ?) group by ctinyint
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: (cint > UDFToInteger(((Dynamic Parameter  index: 1 + Dynamic Parameter  index: 2) + Dynamic Parameter  index: 3))) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 146784 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (cint > UDFToInteger(((Dynamic Parameter  index: 1 + Dynamic Parameter  index: 2) + Dynamic Parameter  index: 3))) (type: boolean)
+                    Statistics: Num rows: 4096 Data size: 48944 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: ctinyint (type: tinyint), cbigint (type: bigint)
+                      outputColumnNames: ctinyint, cbigint
+                      Statistics: Num rows: 4096 Data size: 48944 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: min(ctinyint), max(cbigint)
+                        keys: ctinyint (type: tinyint)
+                        minReductionHashAggr: 0.9680176
+                        mode: hash
+                        outputColumnNames: _col0, _col1, _col2
+                        Statistics: Num rows: 131 Data size: 1968 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: tinyint)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: tinyint)
+                          Statistics: Num rows: 131 Data size: 1968 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: tinyint), _col2 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: min(VALUE._col0), max(VALUE._col1)
+                keys: KEY._col0 (type: tinyint)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 131 Data size: 1968 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: tinyint), _col2 (type: bigint)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 131 Data size: 1572 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 131 Data size: 1572 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare p2 from select min(ctinyint), max(cbigint) from alltypesorc where cint > (? + ? + ?) group by ctinyint
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare p2 from select min(ctinyint), max(cbigint) from alltypesorc where cint > (? + ? + ?) group by ctinyint
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+-64	NULL

Review comment:
       Add `order by` to get a consistent row order across runs.

##########
File path: parser/src/java/org/apache/hadoop/hive/ql/parse/HiveLexerParent.g
##########
@@ -78,6 +78,7 @@ KW_SORT: 'SORT';
 KW_UNION: 'UNION';
 KW_EXCEPT: 'EXCEPT';
 KW_LOAD: 'LOAD';
+KW_PREPARE: 'PREPARE';

Review comment:
       Can we add a test to `TestSQL11ReservedKeyWordsNegative.java` for the new keyword?
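
       A minimal sketch of what that could look like, modeled on the per-keyword
       tests already in that class (the `parse` helper and the exact expected
       failure are assumptions, not verified against the current test file):

       ```java
       @Test
       public void testSQL11ReservedKeyWords_PREPARE() {
         try {
           parse("CREATE TABLE PREPARE (col STRING)");
           Assert.fail("Expected ParseException: PREPARE should now be reserved");
         } catch (ParseException ex) {
           // expected: a reserved keyword cannot be used as a table name
         }
       }
       ```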

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/ExecuteStatementAnalyzer.java
##########
@@ -0,0 +1,377 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.drop;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.ExplainTask;
+import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.OperatorUtils;
+import org.apache.hadoop.hive.ql.exec.SelectOperator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.tez.TezTask;
+import org.apache.hadoop.hive.ql.exec.vector.VectorSelectOperator;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.type.ExprNodeDescExprFactory;
+import org.apache.hadoop.hive.ql.plan.BaseWork;
+import org.apache.hadoop.hive.ql.plan.ExprDynamicParamDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Analyzer for Execute statement.
+ * This analyzer
+ *  retreives cached {@link BaseSemanticAnalyzer},
+ *  makes copy of all tasks by serializing/deserializing it,
+ *  bind dynamic parameters inside cached {@link BaseSemanticAnalyzer} using values provided
+ */
+@DDLType(types = HiveParser.TOK_EXECUTE)
+public class ExecuteStatementAnalyzer extends BaseSemanticAnalyzer {
+
+  public ExecuteStatementAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  /**
+   * This class encapsulate all {@link Task} required to be copied.
+   * This is required because {@link FetchTask} list of {@link Task} may hold reference to same
+   * objects (e.g. list of result files) and are required to be serialized/de-serialized together.
+   */
+  private class PlanCopy {
+    FetchTask fetchTask;
+    List<Task<?>> tasks;
+
+    PlanCopy(FetchTask fetchTask, List<Task<?>> tasks) {
+      this.fetchTask = fetchTask;
+      this.tasks = tasks;
+    }
+
+    FetchTask getFetchTask() {
+      return fetchTask;
+    }
+
+    List<Task<?>> getTasks()  {
+      return tasks;
+    }
+  }
+
+  private String getQueryName(ASTNode root) {
+    ASTNode queryNameAST = (ASTNode)(root.getChild(1));
+    return queryNameAST.getText();
+  }
+
+  /**
+   * Utility method to create copy of provided object using kyro serialization/de-serialization.
+   */
+  private <T> T makeCopy(final Object task, Class<T> objClass) {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    SerializationUtilities.serializePlan(task, baos);
+
+    return SerializationUtilities.deserializePlan(
+        new ByteArrayInputStream(baos.toByteArray()), objClass);
+  }
+
+  /**
+   * Given a {@link BaseSemanticAnalyzer} (cached) this method make copies of all tasks
+   * (including {@link FetchTask}) and update the existing {@link ExecuteStatementAnalyzer}
+   */
+  private void createTaskCopy(final BaseSemanticAnalyzer cachedPlan) {
+    PlanCopy planCopy = new PlanCopy(cachedPlan.getFetchTask(), cachedPlan.getAllRootTasks());
+    planCopy = makeCopy(planCopy, planCopy.getClass());
+    this.setFetchTask(planCopy.getFetchTask());
+    this.rootTasks = planCopy.getTasks();
+  }
+
+  private String getParamLiteralValue(Map<Integer, ASTNode> paramMap, int paramIndex) {
+    assert(paramMap.containsKey(paramIndex));
+    ASTNode node = paramMap.get(paramIndex);
+
+    if (node.getType() == HiveParser.StringLiteral) {
+      // remove quotes
+      return BaseSemanticAnalyzer.unescapeSQLString(node.getText());
+
+    } else {
+      return node.getText();
+    }
+  }
+
+  /**
+   * This method creates a constant expression to replace the given dynamic expression.
+   * @param dynamicExpr Expression node representing Dynamic expression
+   * @param typeInfo Type info used to create constant expression from ASTNode
+   * @param parameterMap Integer to AST node map
+   */
+  private ExprNodeConstantDesc getConstant(ExprDynamicParamDesc dynamicExpr, TypeInfo typeInfo,
+      Map<Integer, ASTNode> parameterMap) {
+    assert(parameterMap.containsKey(dynamicExpr.getIndex()));
+
+    String value = getParamLiteralValue(parameterMap, dynamicExpr.getIndex());
+
+    ExprNodeDescExprFactory factory = new ExprNodeDescExprFactory();
+
+    if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
+      return factory.createBooleanConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
+      return factory.createIntConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
+      return factory.createBigintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.charTypeInfo)
+        // CHAR and VARCHAR typeinfo could differ due to different length, therefore an extra
+        // check is used (based on instanceof) to determine if it is char/varchar types
+        || typeInfo instanceof CharTypeInfo) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.varcharTypeInfo)
+        || typeInfo instanceof VarcharTypeInfo) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
+      return factory.createFloatConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
+      return factory.createDoubleConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.byteTypeInfo)) {
+      return factory.createTinyintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.shortTypeInfo)) {
+      return factory.createSmallintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.dateTypeInfo)) {
+      return factory.createDateConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
+      return factory.createTimestampConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intervalYearMonthTypeInfo)) {
+      return factory.createIntervalYearMonthConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intervalDayTimeTypeInfo)) {
+      return factory.createIntervalDayTimeConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.binaryTypeInfo)) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);
+    }
+    // we will let constant expression itself infer the type
+    return new ExprNodeConstantDesc(parameterMap.get(dynamicExpr.getIndex()));
+  }
+
+  /**
+   * Given a list of expressions this method traverse the expression tree and replaces
+   * all {@link ExprDynamicParamDesc} nodes with constant expression.
+   * @param exprList
+   * @param paramMap
+   */
+  private List<ExprNodeDesc> replaceDynamicParamsInExprList(List<ExprNodeDesc> exprList,
+      Map<Integer, ASTNode> paramMap) {
+    List<ExprNodeDesc> updatedExprList = new ArrayList<>();
+    for (ExprNodeDesc expr:exprList) {
+      expr = replaceDynamicParamsWithConstant(expr, expr.getTypeInfo(), paramMap);
+      updatedExprList.add(expr);
+    }
+    return updatedExprList;
+  }
+
+  /**
+   * Given an expression tree root at expr and type info of the expression this method traverse
+   * the expression tree and replaces all dynamic expression with the constant expression.
+   * This method also does type inference for the new constant expression.
+   * Note about type inference
+   * Since dynamic parameter lacks type we need to figure out appropriate type to create constant
+   * out of string value. To do this, we choose the type of first child of the parent expression
+   * which isn't dynamic parameter
+   */
+  private ExprNodeDesc replaceDynamicParamsWithConstant(ExprNodeDesc expr, TypeInfo typeInfo,
+      Map<Integer, ASTNode> paramMap) {
+    if (expr.getChildren() == null || expr.getChildren().isEmpty()) {
+      if (expr instanceof ExprDynamicParamDesc) {
+        return getConstant((ExprDynamicParamDesc)expr, typeInfo, paramMap);
+      }
+      return expr;
+    }
+
+    for(ExprNodeDesc child:expr.getChildren()) {
+      // we need typeinfo
+      if(child instanceof ExprDynamicParamDesc) {
+        continue;
+      } else if( child.getTypeInfo() != TypeInfoFactory.voidTypeInfo
+          && !child.getTypeInfo().getTypeName().equals(
+          TypeInfoFactory.voidTypeInfo.getTypeName())){
+        typeInfo = child.getTypeInfo();
+        break;
+      }
+    }
+    assert(typeInfo != null);

Review comment:
       `Preconditions` instead of `assert`?
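
       For example (a sketch; Guava is already on the ql classpath, so only the
       import is new):

       ```java
       import com.google.common.base.Preconditions;

       // assert is a no-op unless the JVM runs with -ea, so the existing check can
       // silently pass in production; Preconditions always evaluates:
       Preconditions.checkNotNull(typeInfo,
           "could not infer a type for dynamic parameter from sibling expressions");
       ```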

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/Driver.java
##########
@@ -433,6 +433,12 @@ private void runInternal(String command, boolean alreadyCompiled) throws Command
 
     driverState.lock();
     try {
+      if (driverContext != null && driverContext.getPlan() != null
+          && driverContext.getPlan().getIsPrepareQuery()
+          && !driverContext.getPlan().isExplain()) {
+        //TODO: LOG

Review comment:
       Leftover TODO: should this log that execution is being skipped for a prepare query?
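
       If a message is wanted before skipping execution, something like this would
       do (a sketch; it assumes Driver's existing slf4j `LOG` and the
       `QueryPlan#getQueryId` accessor):

       ```java
       LOG.info("Skipping execution of prepare query (id: {}); the cached plan will"
           + " be bound and run by a later EXECUTE statement",
           driverContext.getPlan().getQueryId());
       ```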

##########
File path: ql/src/test/queries/clientpositive/prepare_plan.q
##########
@@ -0,0 +1,113 @@
+--! qt:dataset:src
+--! qt:dataset:alltypesorc
+
+set hive.explain.user=false;
+set hive.vectorized.execution.enabled=false;
+
+explain extended prepare pcount from select count(*) from src where key > ?;
+prepare pcount from select count(*) from src where key > ?;
+execute pcount using 200;
+
+-- single param
+explain extended prepare p1 from select * from src where key > ? order by key limit 10;
+prepare p1 from select * from src where key > ? order by key limit 10;
+
+execute p1 using 200;
+
+-- same query, different param
+execute p1 using 0;
+
+-- same query, negative param
+--TODO: fails (constant in grammar do not support negatives)
+-- execute p1 using -1;

Review comment:
       Follow-up JIRA?

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/Compiler.java
##########
@@ -338,12 +339,22 @@ private QueryPlan createPlan(BaseSemanticAnalyzer sem) {
     plan.setOptimizedCBOPlan(context.getCalcitePlan());
     plan.setOptimizedQueryString(context.getOptimizedSql());
 
+    // this is require so that later driver can skip executing prepare queries

Review comment:
       nit. `required`

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/QueryPlan.java
##########
@@ -121,6 +121,8 @@
   private final DDLDescWithWriteId acidDdlDesc;
   private Boolean autoCommitValue;
 
+  private Boolean isPrepareQuery;

Review comment:
       nit. `isPrepareQuery` -> `prepareQuery`. Please change setters and getters accordingly below, e.g., `getIsPrepareQuery` -> `isPrepareQuery`.
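
       Sketch of the suggested shape (switching the boxed `Boolean` to a primitive
       `boolean` is an extra assumption here, only safe if a null third state is
       never needed):

       ```java
       private boolean prepareQuery;

       public boolean isPrepareQuery() {
         return prepareQuery;
       }

       public void setPrepareQuery(boolean prepareQuery) {
         this.prepareQuery = prepareQuery;
       }
       ```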

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/ExecuteStatementAnalyzer.java
##########
@@ -0,0 +1,377 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.drop;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.ExplainTask;
+import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.OperatorUtils;
+import org.apache.hadoop.hive.ql.exec.SelectOperator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.tez.TezTask;
+import org.apache.hadoop.hive.ql.exec.vector.VectorSelectOperator;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.type.ExprNodeDescExprFactory;
+import org.apache.hadoop.hive.ql.plan.BaseWork;
+import org.apache.hadoop.hive.ql.plan.ExprDynamicParamDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Analyzer for Execute statement.
+ * This analyzer
+ *  retreives cached {@link BaseSemanticAnalyzer},
+ *  makes copy of all tasks by serializing/deserializing it,
+ *  bind dynamic parameters inside cached {@link BaseSemanticAnalyzer} using values provided
+ */
+@DDLType(types = HiveParser.TOK_EXECUTE)
+public class ExecuteStatementAnalyzer extends BaseSemanticAnalyzer {
+
+  public ExecuteStatementAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  /**
+   * This class encapsulate all {@link Task} required to be copied.
+   * This is required because {@link FetchTask} list of {@link Task} may hold reference to same
+   * objects (e.g. list of result files) and are required to be serialized/de-serialized together.
+   */
+  private class PlanCopy {
+    FetchTask fetchTask;
+    List<Task<?>> tasks;
+
+    PlanCopy(FetchTask fetchTask, List<Task<?>> tasks) {
+      this.fetchTask = fetchTask;
+      this.tasks = tasks;
+    }
+
+    FetchTask getFetchTask() {
+      return fetchTask;
+    }
+
+    List<Task<?>> getTasks()  {
+      return tasks;
+    }
+  }
+
+  private String getQueryName(ASTNode root) {
+    ASTNode queryNameAST = (ASTNode)(root.getChild(1));
+    return queryNameAST.getText();
+  }
+
+  /**
+   * Utility method to create copy of provided object using kyro serialization/de-serialization.

Review comment:
       nit. typo: `kyro`

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/parse/type/TypeCheckProcFactory.java
##########
@@ -283,6 +283,33 @@ public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
 
   }
 
+  /**
+   * Processor for processing Dynamic expression.
+   */
+  public class DynamicParameterProcessor implements SemanticNodeProcessor {
+
+    @Override
+    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
+        Object... nodeOutputs) throws SemanticException {
+      TypeCheckCtx ctx = (TypeCheckCtx) procCtx;
+      if (ctx.getError() != null) {
+        return null;
+      }
+
+      T desc = processGByExpr(nd, procCtx);

Review comment:
       Is this correct? Could we leave a comment stating why the call to `processGByExpr` is needed?
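
       If the call is indeed needed, a comment along these lines could answer the
       question in place (my reading of `processGByExpr`, based on the other leaf
       processors in this factory, so worth double-checking):

       ```java
       // A dynamic parameter may sit inside an expression that the group-by has
       // already evaluated (e.g. in a HAVING clause). Like the null/constant/string
       // processors above, first ask processGByExpr whether this node maps to an
       // existing group-by column; if so, reuse that column reference instead of
       // building a new expression.
       T desc = processGByExpr(nd, procCtx);
       if (desc != null) {
         return desc;
       }
       ```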

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/ExecuteStatementAnalyzer.java
##########
@@ -0,0 +1,377 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.drop;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.ExplainTask;
+import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.OperatorUtils;
+import org.apache.hadoop.hive.ql.exec.SelectOperator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.tez.TezTask;
+import org.apache.hadoop.hive.ql.exec.vector.VectorSelectOperator;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.type.ExprNodeDescExprFactory;
+import org.apache.hadoop.hive.ql.plan.BaseWork;
+import org.apache.hadoop.hive.ql.plan.ExprDynamicParamDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Analyzer for Execute statement.
+ * This analyzer
+ *  retreives cached {@link BaseSemanticAnalyzer},

Review comment:
       typo. `retreives`

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/ExecuteStatementAnalyzer.java
##########
@@ -0,0 +1,377 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.drop;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.ExplainTask;
+import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.OperatorUtils;
+import org.apache.hadoop.hive.ql.exec.SelectOperator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.tez.TezTask;
+import org.apache.hadoop.hive.ql.exec.vector.VectorSelectOperator;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.type.ExprNodeDescExprFactory;
+import org.apache.hadoop.hive.ql.plan.BaseWork;
+import org.apache.hadoop.hive.ql.plan.ExprDynamicParamDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Analyzer for Execute statement.
+ * This analyzer
+ *  retreives cached {@link BaseSemanticAnalyzer},
+ *  makes copy of all tasks by serializing/deserializing it,
+ *  bind dynamic parameters inside cached {@link BaseSemanticAnalyzer} using values provided
+ */
+@DDLType(types = HiveParser.TOK_EXECUTE)
+public class ExecuteStatementAnalyzer extends BaseSemanticAnalyzer {
+
+  public ExecuteStatementAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  /**
+   * This class encapsulate all {@link Task} required to be copied.
+   * This is required because {@link FetchTask} list of {@link Task} may hold reference to same
+   * objects (e.g. list of result files) and are required to be serialized/de-serialized together.
+   */
+  private class PlanCopy {
+    FetchTask fetchTask;
+    List<Task<?>> tasks;
+
+    PlanCopy(FetchTask fetchTask, List<Task<?>> tasks) {
+      this.fetchTask = fetchTask;
+      this.tasks = tasks;
+    }
+
+    FetchTask getFetchTask() {
+      return fetchTask;
+    }
+
+    List<Task<?>> getTasks()  {
+      return tasks;
+    }
+  }
+
+  private String getQueryName(ASTNode root) {
+    ASTNode queryNameAST = (ASTNode)(root.getChild(1));
+    return queryNameAST.getText();
+  }
+
+  /**
+   * Utility method to create copy of provided object using kyro serialization/de-serialization.
+   */
+  private <T> T makeCopy(final Object task, Class<T> objClass) {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    SerializationUtilities.serializePlan(task, baos);
+
+    return SerializationUtilities.deserializePlan(
+        new ByteArrayInputStream(baos.toByteArray()), objClass);
+  }
+
+  /**
+   * Given a {@link BaseSemanticAnalyzer} (cached) this method make copies of all tasks
+   * (including {@link FetchTask}) and update the existing {@link ExecuteStatementAnalyzer}
+   */
+  private void createTaskCopy(final BaseSemanticAnalyzer cachedPlan) {
+    PlanCopy planCopy = new PlanCopy(cachedPlan.getFetchTask(), cachedPlan.getAllRootTasks());
+    planCopy = makeCopy(planCopy, planCopy.getClass());
+    this.setFetchTask(planCopy.getFetchTask());
+    this.rootTasks = planCopy.getTasks();
+  }
+
+  private String getParamLiteralValue(Map<Integer, ASTNode> paramMap, int paramIndex) {
+    assert(paramMap.containsKey(paramIndex));
+    ASTNode node = paramMap.get(paramIndex);
+
+    if (node.getType() == HiveParser.StringLiteral) {
+      // remove quotes
+      return BaseSemanticAnalyzer.unescapeSQLString(node.getText());
+
+    } else {
+      return node.getText();
+    }
+  }
+
+  /**
+   * This method creates a constant expression to replace the given dynamic expression.
+   * @param dynamicExpr Expression node representing Dynamic expression
+   * @param typeInfo Type info used to create constant expression from ASTNode
+   * @param parameterMap Integer to AST node map
+   */
+  private ExprNodeConstantDesc getConstant(ExprDynamicParamDesc dynamicExpr, TypeInfo typeInfo,
+      Map<Integer, ASTNode> parameterMap) {
+    assert(parameterMap.containsKey(dynamicExpr.getIndex()));
+
+    String value = getParamLiteralValue(parameterMap, dynamicExpr.getIndex());
+
+    ExprNodeDescExprFactory factory = new ExprNodeDescExprFactory();
+
+    if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
+      return factory.createBooleanConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
+      return factory.createIntConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
+      return factory.createBigintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.charTypeInfo)
+        // CHAR and VARCHAR typeinfo could differ due to different length, therefore an extra
+        // check is used (based on instanceof) to determine if it is char/varchar types
+        || typeInfo instanceof CharTypeInfo) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.varcharTypeInfo)
+        || typeInfo instanceof VarcharTypeInfo) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
+      return factory.createFloatConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
+      return factory.createDoubleConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.byteTypeInfo)) {
+      return factory.createTinyintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.shortTypeInfo)) {
+      return factory.createSmallintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.dateTypeInfo)) {
+      return factory.createDateConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
+      return factory.createTimestampConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intervalYearMonthTypeInfo)) {
+      return factory.createIntervalYearMonthConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intervalDayTimeTypeInfo)) {
+      return factory.createIntervalDayTimeConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.binaryTypeInfo)) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);

Review comment:
       I think we talked about this offline. Do we support specifying binary constants in the query? If we do not, should we just throw some exception?
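
       If binary parameters are indeed unsupported, the branch could fail fast
       instead, e.g. (a sketch; `getConstant` would then have to declare
       `throws SemanticException`):

       ```java
       } else if (typeInfo.equals(TypeInfoFactory.binaryTypeInfo)) {
         // No literal syntax exists for passing binary values to EXECUTE, so reject
         // the parameter outright rather than silently coercing it to a string.
         throw new SemanticException("Dynamic parameters of binary type are not supported");
       }
       ```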

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/parse/type/ExprFactory.java
##########
@@ -112,40 +112,45 @@ protected boolean isAllConstants(List<T> exprs) {
    */
   protected abstract T createNullConstantExpr();
 
+  /**
+   * Creates a dynamic parameter expression with void type.
+   */
+  protected abstract T createDynamicParamExpr(int index);
+
   /**
    * Creates a boolean constant expression from input value.
    */
-  protected abstract T createBooleanConstantExpr(String value);
+  public abstract T createBooleanConstantExpr(String value);

Review comment:
       If we rely on `ExprNodeTypeCheck.genExprNode`, this visibility does not need to change (it makes sense to limit the scope of these methods).

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/stats/StatsUtils.java
##########
@@ -1619,6 +1620,9 @@ public static ColStatistics getColStatisticsFromExpression(HiveConf conf, Statis
       colName = enfd.getFieldName();
       colType = enfd.getTypeString();
       countDistincts = numRows;
+    } else if (end instanceof ExprDynamicParamDesc) {
+      //skip colecting stats for parameters

Review comment:
       typo. colecting
   
   What will happen in this case? Could we add a few lines explaining the behavior?
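
       A possible wording for the requested explanation (the fallback behavior it
       describes is my reading of the surrounding code, not verified):

       ```java
       } else if (end instanceof ExprDynamicParamDesc) {
         // Skip collecting stats for dynamic parameters: their values are unknown at
         // compile time, so there is no column or literal to derive NDV/min/max from.
         // Callers then see column statistics without these fields and fall back to
         // the default selectivity heuristics, as for any expression lacking stats.
       ```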

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/ExecuteStatementAnalyzer.java
##########
@@ -0,0 +1,377 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.drop;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.ExplainTask;
+import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.OperatorUtils;
+import org.apache.hadoop.hive.ql.exec.SelectOperator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.tez.TezTask;
+import org.apache.hadoop.hive.ql.exec.vector.VectorSelectOperator;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.type.ExprNodeDescExprFactory;
+import org.apache.hadoop.hive.ql.plan.BaseWork;
+import org.apache.hadoop.hive.ql.plan.ExprDynamicParamDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Analyzer for Execute statement.
+ * This analyzer
+ *  retreives cached {@link BaseSemanticAnalyzer},
+ *  makes copy of all tasks by serializing/deserializing it,
+ *  bind dynamic parameters inside cached {@link BaseSemanticAnalyzer} using values provided
+ */
+@DDLType(types = HiveParser.TOK_EXECUTE)
+public class ExecuteStatementAnalyzer extends BaseSemanticAnalyzer {
+
+  public ExecuteStatementAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  /**
+   * This class encapsulate all {@link Task} required to be copied.
+   * This is required because {@link FetchTask} list of {@link Task} may hold reference to same
+   * objects (e.g. list of result files) and are required to be serialized/de-serialized together.
+   */
+  private class PlanCopy {
+    FetchTask fetchTask;
+    List<Task<?>> tasks;
+
+    PlanCopy(FetchTask fetchTask, List<Task<?>> tasks) {
+      this.fetchTask = fetchTask;
+      this.tasks = tasks;
+    }
+
+    FetchTask getFetchTask() {
+      return fetchTask;
+    }
+
+    List<Task<?>> getTasks()  {
+      return tasks;
+    }
+  }
+
+  private String getQueryName(ASTNode root) {
+    ASTNode queryNameAST = (ASTNode)(root.getChild(1));
+    return queryNameAST.getText();
+  }
+
+  /**
+   * Utility method to create copy of provided object using kyro serialization/de-serialization.
+   */
+  private <T> T makeCopy(final Object task, Class<T> objClass) {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    SerializationUtilities.serializePlan(task, baos);
+
+    return SerializationUtilities.deserializePlan(
+        new ByteArrayInputStream(baos.toByteArray()), objClass);
+  }
+
+  /**
+   * Given a {@link BaseSemanticAnalyzer} (cached) this method make copies of all tasks
+   * (including {@link FetchTask}) and update the existing {@link ExecuteStatementAnalyzer}
+   */
+  private void createTaskCopy(final BaseSemanticAnalyzer cachedPlan) {
+    PlanCopy planCopy = new PlanCopy(cachedPlan.getFetchTask(), cachedPlan.getAllRootTasks());
+    planCopy = makeCopy(planCopy, planCopy.getClass());
+    this.setFetchTask(planCopy.getFetchTask());
+    this.rootTasks = planCopy.getTasks();
+  }
+
+  private String getParamLiteralValue(Map<Integer, ASTNode> paramMap, int paramIndex) {
+    assert(paramMap.containsKey(paramIndex));
+    ASTNode node = paramMap.get(paramIndex);
+
+    if (node.getType() == HiveParser.StringLiteral) {
+      // remove quotes
+      return BaseSemanticAnalyzer.unescapeSQLString(node.getText());
+
+    } else {
+      return node.getText();
+    }
+  }
+
+  /**
+   * This method creates a constant expression to replace the given dynamic expression.
+   * @param dynamicExpr Expression node representing Dynamic expression
+   * @param typeInfo Type info used to create constant expression from ASTNode
+   * @param parameterMap Integer to AST node map
+   */
+  private ExprNodeConstantDesc getConstant(ExprDynamicParamDesc dynamicExpr, TypeInfo typeInfo,
+      Map<Integer, ASTNode> parameterMap) {
+    assert(parameterMap.containsKey(dynamicExpr.getIndex()));
+
+    String value = getParamLiteralValue(parameterMap, dynamicExpr.getIndex());
+
+    ExprNodeDescExprFactory factory = new ExprNodeDescExprFactory();
+
+    if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
+      return factory.createBooleanConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
+      return factory.createIntConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
+      return factory.createBigintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.charTypeInfo)
+        // CHAR and VARCHAR typeinfo could differ due to different length, therefore an extra
+        // check is used (based on instanceof) to determine if it is char/varchar types
+        || typeInfo instanceof CharTypeInfo) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.varcharTypeInfo)
+        || typeInfo instanceof VarcharTypeInfo) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
+      return factory.createFloatConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
+      return factory.createDoubleConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.byteTypeInfo)) {
+      return factory.createTinyintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.shortTypeInfo)) {
+      return factory.createSmallintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.dateTypeInfo)) {
+      return factory.createDateConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
+      return factory.createTimestampConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intervalYearMonthTypeInfo)) {
+      return factory.createIntervalYearMonthConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intervalDayTimeTypeInfo)) {
+      return factory.createIntervalDayTimeConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.binaryTypeInfo)) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);
+    }
+    // we will let constant expression itself infer the type
+    return new ExprNodeConstantDesc(parameterMap.get(dynamicExpr.getIndex()));
+  }
+
+  /**
+   * Given a list of expressions, this method traverses each expression tree and replaces
+   * all {@link ExprDynamicParamDesc} nodes with constant expressions.
+   * @param exprList
+   * @param paramMap
+   */
+  private List<ExprNodeDesc> replaceDynamicParamsInExprList(List<ExprNodeDesc> exprList,
+      Map<Integer, ASTNode> paramMap) {
+    List<ExprNodeDesc> updatedExprList = new ArrayList<>();
+    for (ExprNodeDesc expr:exprList) {
+      expr = replaceDynamicParamsWithConstant(expr, expr.getTypeInfo(), paramMap);
+      updatedExprList.add(expr);
+    }
+    return updatedExprList;
+  }
+
+  /**
+   * Given an expression tree rooted at expr and the type info of the expression, this method
+   * traverses the tree and replaces all dynamic expressions with constant expressions.
+   * This method also does type inference for the new constant expressions.
+   * Note about type inference:
+   * since a dynamic parameter lacks a type, we need to figure out an appropriate type to create
+   * a constant out of the string value. To do this, we choose the type of the first child of the
+   * parent expression which isn't a dynamic parameter.
+   */
+  private ExprNodeDesc replaceDynamicParamsWithConstant(ExprNodeDesc expr, TypeInfo typeInfo,
+      Map<Integer, ASTNode> paramMap) {
+    if (expr.getChildren() == null || expr.getChildren().isEmpty()) {
+      if (expr instanceof ExprDynamicParamDesc) {
+        return getConstant((ExprDynamicParamDesc)expr, typeInfo, paramMap);
+      }
+      return expr;
+    }
+
+    for(ExprNodeDesc child:expr.getChildren()) {
+      // we need typeinfo
+      if(child instanceof ExprDynamicParamDesc) {
+        continue;
+      } else if( child.getTypeInfo() != TypeInfoFactory.voidTypeInfo
+          && !child.getTypeInfo().getTypeName().equals(
+          TypeInfoFactory.voidTypeInfo.getTypeName())){
+        typeInfo = child.getTypeInfo();
+        break;
+      }
+    }
+    assert(typeInfo != null);
+
+    List<ExprNodeDesc> exprList = new ArrayList<>();
+    for(ExprNodeDesc child: expr.getChildren()) {
+      if(child instanceof ExprDynamicParamDesc) {
+        child = getConstant((ExprDynamicParamDesc)child, typeInfo, paramMap);
+      } else {
+        child = replaceDynamicParamsWithConstant(child, typeInfo, paramMap);
+      }
+      exprList.add(child);
+    }
+    expr.getChildren().clear();
+    expr.getChildren().addAll(exprList);
+    return expr;
+  }
+
+  /**
+   * Given a map of parameter index to ASTNode, this traverses all operators within all tasks
+   * (including the fetch task and all root tasks) to find and replace all dynamic expressions.
+   */
+  private void bindDynamicParams(Map<Integer, ASTNode> parameterMap) throws SemanticException{
+    assert(!parameterMap.isEmpty());
+
+    Set<Operator<?>> operators = new HashSet<>();
+    if (this.getFetchTask() != null) {
+      operators.addAll(OperatorUtils.getAllFetchOperators(this.getFetchTask()));
+    }
+    List<Task<?>> allTasks = this.getRootTasks();
+    List<TezTask> rootTasks = Utilities.getTezTasks(this.getRootTasks());
+    for(Task task:allTasks) {
+      List<BaseWork> baseWorks = new ArrayList<>();
+      if (task instanceof ExplainTask) {
+        ExplainTask explainTask = (ExplainTask) task;
+        for (Task explainRootTask : explainTask.getWork().getRootTasks()) {
+          if (explainRootTask instanceof TezTask) {
+            TezTask explainTezTask = (TezTask) explainRootTask;
+            baseWorks.addAll(explainTezTask.getWork().getAllWork());
+          }
+        }
+      } else if (task instanceof TezTask) {
+        baseWorks = ((TezTask) task).getWork().getAllWork();
+      }
+      for (BaseWork baseWork : baseWorks) {
+        operators.addAll(baseWork.getAllOperators());
+      }
+    }
+
+    for (Operator<?> op : operators) {
+      switch(op.getType()) {
+      case FILTER:
+        FilterOperator filterOp = (FilterOperator)op;
+        ExprNodeDesc predicate = filterOp.getConf().getPredicate();
+        filterOp.getConf().setPredicate(
+            replaceDynamicParamsWithConstant(predicate, TypeInfoFactory.booleanTypeInfo, parameterMap));
+        break;
+      case SELECT:

Review comment:
       I think this should not be part of this patch (only FILTER)?
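
       For illustration, a minimal sketch of that narrower scope, keeping only the FILTER branch of the operator loop (all names as in the quoted patch):

       ```java
       for (Operator<?> op : operators) {
         switch (op.getType()) {
         case FILTER:
           FilterOperator filterOp = (FilterOperator) op;
           // Rewrite the filter predicate, binding each dynamic parameter to its literal value.
           filterOp.getConf().setPredicate(replaceDynamicParamsWithConstant(
               filterOp.getConf().getPredicate(), TypeInfoFactory.booleanTypeInfo, parameterMap));
           break;
         default:
           break;
         }
       }
       ```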

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/exec/OperatorUtils.java
##########
@@ -49,6 +50,8 @@
 import com.google.common.collect.Lists;
 import com.google.common.collect.Multimap;
 
+import static org.apache.hadoop.hive.ql.optimizer.physical.AnnotateRunTimeStatsOptimizer.getAllOperatorsForSimpleFetch;

Review comment:
       We should probably move this method to `TaskUtils` or `Utilities` rather than using it directly from `AnnotateRunTimeStatsOptimizer`.
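
       A hedged sketch of what hosting the helper in `Utilities` (or a new `TaskUtils`) could look like; the parameter and body below are assumptions for illustration, not the actual method from `AnnotateRunTimeStatsOptimizer`:

       ```java
       // Walk a simple fetch's operator tree from its source and collect every operator.
       public static Set<Operator<?>> getAllOperatorsForSimpleFetch(Operator<?> source) {
         Set<Operator<?>> found = new HashSet<>();
         Deque<Operator<?>> pending = new ArrayDeque<>();
         pending.add(source);
         while (!pending.isEmpty()) {
           Operator<?> op = pending.pop();
           if (found.add(op) && op.getChildOperators() != null) {
             pending.addAll(op.getChildOperators());
           }
         }
         return found;
       }
       ```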

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
##########
@@ -205,7 +205,9 @@
   DROP_MAPPING("DROP MAPPING", HiveParser.TOK_DROP_MAPPING, null, null, false, false),
   CREATE_SCHEDULED_QUERY("CREATE SCHEDULED QUERY", HiveParser.TOK_CREATE_SCHEDULED_QUERY, null, null),
   ALTER_SCHEDULED_QUERY("ALTER SCHEDULED QUERY", HiveParser.TOK_ALTER_SCHEDULED_QUERY, null, null),
-  DROP_SCHEDULED_QUERY("DROP SCHEDULED QUERY", HiveParser.TOK_DROP_SCHEDULED_QUERY, null, null)
+  DROP_SCHEDULED_QUERY("DROP SCHEDULED QUERY", HiveParser.TOK_DROP_SCHEDULED_QUERY, null, null),
+  PREPARE("PREPARE QUERY", HiveParser.TOK_PREPARE, null, null),
+  EXECUTE("EXECUTE QUERY", HiveParser.TOK_EXECUTE, null, null)

Review comment:
       I am wondering whether we need to add the `SELECT` privilege here, or whether it will be taken care of since we are relying on the query execution logic? Could we investigate this in a follow-up (it is probably important, to avoid anyone bypassing authorization)?
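
       If the execute path turns out not to inherit authorization from the cached query, the fix might be as small as declaring read privileges on the new enum entries. A hedged sketch, mirroring the `(name, token, inputPrivileges, outputPrivileges)` shape of the surrounding entries; whether `SELECT` is the right privilege is exactly the open question above:

       ```java
       PREPARE("PREPARE QUERY", HiveParser.TOK_PREPARE, new Privilege[]{Privilege.SELECT}, null),
       EXECUTE("EXECUTE QUERY", HiveParser.TOK_EXECUTE, new Privilege[]{Privilege.SELECT}, null)
       ```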

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/PrepareStatementAnalyzer.java
##########
@@ -0,0 +1,81 @@
+/*

Review comment:
       This analyzer should probably not be in `table/drop` folder?

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/ExecuteStatementAnalyzer.java
##########
@@ -0,0 +1,377 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.drop;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.ExplainTask;
+import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.OperatorUtils;
+import org.apache.hadoop.hive.ql.exec.SelectOperator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.tez.TezTask;
+import org.apache.hadoop.hive.ql.exec.vector.VectorSelectOperator;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.type.ExprNodeDescExprFactory;
+import org.apache.hadoop.hive.ql.plan.BaseWork;
+import org.apache.hadoop.hive.ql.plan.ExprDynamicParamDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Analyzer for Execute statement.
+ * This analyzer
+ *  retrieves the cached {@link BaseSemanticAnalyzer},
+ *  makes a copy of all tasks by serializing/deserializing them, and
+ *  binds dynamic parameters inside the cached {@link BaseSemanticAnalyzer} using the provided values.
+ */
+@DDLType(types = HiveParser.TOK_EXECUTE)
+public class ExecuteStatementAnalyzer extends BaseSemanticAnalyzer {
+
+  public ExecuteStatementAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  /**
+   * This class encapsulates all {@link Task}s required to be copied.
+   * This is needed because the {@link FetchTask} and the list of {@link Task}s may hold references
+   * to the same objects (e.g. the list of result files) and must be serialized/deserialized together.
+   */
+  private class PlanCopy {
+    FetchTask fetchTask;
+    List<Task<?>> tasks;
+
+    PlanCopy(FetchTask fetchTask, List<Task<?>> tasks) {
+      this.fetchTask = fetchTask;
+      this.tasks = tasks;
+    }
+
+    FetchTask getFetchTask() {
+      return fetchTask;
+    }
+
+    List<Task<?>> getTasks()  {
+      return tasks;
+    }
+  }
+
+  private String getQueryName(ASTNode root) {
+    ASTNode queryNameAST = (ASTNode)(root.getChild(1));
+    return queryNameAST.getText();
+  }
+
+  /**
+   * Utility method to create a copy of the provided object using Kryo serialization/deserialization.
+   */
+  private <T> T makeCopy(final Object task, Class<T> objClass) {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    SerializationUtilities.serializePlan(task, baos);
+
+    return SerializationUtilities.deserializePlan(
+        new ByteArrayInputStream(baos.toByteArray()), objClass);
+  }
+
+  /**
+   * Given a cached {@link BaseSemanticAnalyzer}, this method makes copies of all tasks
+   * (including the {@link FetchTask}) and updates the existing {@link ExecuteStatementAnalyzer}.
+   */
+  private void createTaskCopy(final BaseSemanticAnalyzer cachedPlan) {
+    PlanCopy planCopy = new PlanCopy(cachedPlan.getFetchTask(), cachedPlan.getAllRootTasks());
+    planCopy = makeCopy(planCopy, planCopy.getClass());
+    this.setFetchTask(planCopy.getFetchTask());
+    this.rootTasks = planCopy.getTasks();
+  }
+
+  private String getParamLiteralValue(Map<Integer, ASTNode> paramMap, int paramIndex) {
+    assert(paramMap.containsKey(paramIndex));
+    ASTNode node = paramMap.get(paramIndex);
+
+    if (node.getType() == HiveParser.StringLiteral) {
+      // remove quotes
+      return BaseSemanticAnalyzer.unescapeSQLString(node.getText());
+
+    } else {
+      return node.getText();
+    }
+  }
+
+  /**
+   * This method creates a constant expression to replace the given dynamic expression.
+   * @param dynamicExpr Expression node representing a dynamic parameter
+   * @param typeInfo Type info used to create constant expression from ASTNode
+   * @param parameterMap Integer to AST node map
+   */
+  private ExprNodeConstantDesc getConstant(ExprDynamicParamDesc dynamicExpr, TypeInfo typeInfo,
+      Map<Integer, ASTNode> parameterMap) {
+    assert(parameterMap.containsKey(dynamicExpr.getIndex()));
+
+    String value = getParamLiteralValue(parameterMap, dynamicExpr.getIndex());
+
+    ExprNodeDescExprFactory factory = new ExprNodeDescExprFactory();
+
+    if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
+      return factory.createBooleanConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
+      return factory.createIntConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
+      return factory.createBigintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.charTypeInfo)
+        // CHAR and VARCHAR typeinfo could differ due to different length, therefore an extra
+        // check is used (based on instanceof) to determine if it is char/varchar types
+        || typeInfo instanceof CharTypeInfo) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.varcharTypeInfo)
+        || typeInfo instanceof VarcharTypeInfo) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
+      return factory.createFloatConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
+      return factory.createDoubleConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.byteTypeInfo)) {
+      return factory.createTinyintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.shortTypeInfo)) {
+      return factory.createSmallintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.dateTypeInfo)) {
+      return factory.createDateConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
+      return factory.createTimestampConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intervalYearMonthTypeInfo)) {
+      return factory.createIntervalYearMonthConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intervalDayTimeTypeInfo)) {
+      return factory.createIntervalDayTimeConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.binaryTypeInfo)) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);
+    }
+    // we will let constant expression itself infer the type
+    return new ExprNodeConstantDesc(parameterMap.get(dynamicExpr.getIndex()));
+  }
+
+  /**
+   * Given a list of expressions, this method traverses each expression tree and replaces
+   * all {@link ExprDynamicParamDesc} nodes with constant expressions.
+   * @param exprList
+   * @param paramMap
+   */
+  private List<ExprNodeDesc> replaceDynamicParamsInExprList(List<ExprNodeDesc> exprList,
+      Map<Integer, ASTNode> paramMap) {
+    List<ExprNodeDesc> updatedExprList = new ArrayList<>();
+    for (ExprNodeDesc expr:exprList) {
+      expr = replaceDynamicParamsWithConstant(expr, expr.getTypeInfo(), paramMap);
+      updatedExprList.add(expr);
+    }
+    return updatedExprList;
+  }
+
+  /**
+   * Given an expression tree rooted at expr and the type info of the expression, this method
+   * traverses the tree and replaces all dynamic expressions with constant expressions.
+   * This method also does type inference for the new constant expressions.
+   * Note about type inference:
+   * since a dynamic parameter lacks a type, we need to figure out an appropriate type to create
+   * a constant out of the string value. To do this, we choose the type of the first child of the
+   * parent expression which isn't a dynamic parameter.
+   */
+  private ExprNodeDesc replaceDynamicParamsWithConstant(ExprNodeDesc expr, TypeInfo typeInfo,
+      Map<Integer, ASTNode> paramMap) {
+    if (expr.getChildren() == null || expr.getChildren().isEmpty()) {
+      if (expr instanceof ExprDynamicParamDesc) {
+        return getConstant((ExprDynamicParamDesc)expr, typeInfo, paramMap);
+      }
+      return expr;
+    }
+
+    for(ExprNodeDesc child:expr.getChildren()) {
+      // we need typeinfo
+      if(child instanceof ExprDynamicParamDesc) {
+        continue;
+      } else if( child.getTypeInfo() != TypeInfoFactory.voidTypeInfo

Review comment:
       Isn't `child.getTypeInfo() != TypeInfoFactory.voidTypeInfo` sufficient?
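
       Since `TypeInfoFactory.voidTypeInfo` is a cached singleton, the reference comparison alone should indeed be enough; a sketch of the simplified loop:

       ```java
       for (ExprNodeDesc child : expr.getChildren()) {
         if (child instanceof ExprDynamicParamDesc) {
           continue; // dynamic parameters carry no usable type info
         }
         if (child.getTypeInfo() != TypeInfoFactory.voidTypeInfo) {
           typeInfo = child.getTypeInfo();
           break;
         }
       }
       ```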

##########
File path: ql/src/test/queries/clientpositive/prepare_plan.q
##########
@@ -0,0 +1,112 @@
+--! qt:dataset:src

Review comment:
       We should probably add four queries that read `alltypesorc`: each query with all possible types in the filter expression using one of the following comparisons: `=`, `<`, `IN`, and `BETWEEN`. That would provide quite exhaustive testing.
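
       For instance, hypothetical additions to `prepare_plan.q` along those lines (column names follow the standard `alltypesorc` schema; which columns to pair with each comparison is only a sketch, and whether every comparison already accepts parameters is what the tests would pin down):

       ```sql
       prepare p_eq from select count(*) from alltypesorc
         where ctinyint = ? and csmallint = ? and cint = ? and cbigint = ? and cfloat = ? and cstring1 = ?;
       prepare p_lt from select count(*) from alltypesorc
         where ctinyint < ? and cint < ? and cbigint < ? and cdouble < ?;
       prepare p_in from select count(*) from alltypesorc
         where cint in (?, ?) and cstring1 in (?, ?);
       prepare p_between from select count(*) from alltypesorc
         where cint between ? and ?;
       ```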

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/PrepareStatementAnalyzer.java
##########
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.drop;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.CalcitePlanner;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+
+/**
+ * Analyzer for Prepare queries. This analyzer generates a plan for the parameterized query
+ * and saves it in the cache.
+ */
+@DDLType(types = HiveParser.TOK_PREPARE)
+public class PrepareStatementAnalyzer extends CalcitePlanner {
+
+  public PrepareStatementAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  private String getQueryName(ASTNode root) {
+    ASTNode queryNameAST = (ASTNode)(root.getChild(1));
+    return queryNameAST.getText();
+  }
+
+  /**
+   * This method saves the current {@link PrepareStatementAnalyzer} object as well as
+   * the config used to compile the plan.
+   * @param queryName
+   * @throws SemanticException
+   */
+  private void savePlan(String queryName) throws SemanticException{
+    SessionState ss = SessionState.get();
+    assert(ss != null);
+
+    if (ss.getPreparePlans().containsKey(queryName)) {
+      throw new SemanticException("Prepare query: " + queryName + " already exists.");
+    }
+    ss.getPreparePlans().put(queryName, this);

Review comment:
       Could we have a single map that stores all necessary information in a single object value, rather than a map for the prepare plans and a map for the config?
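
       A hedged sketch of that consolidation; the `PreparePlanEntry` name and its fields are illustrative, not from the patch:

       ```java
       // One session-level map, keyed by prepared-query name, whose value bundles the
       // cached analyzer together with the config used to compile it.
       public final class PreparePlanEntry {
         private final CalcitePlanner analyzer;
         private final HiveConf conf;

         public PreparePlanEntry(CalcitePlanner analyzer, HiveConf conf) {
           this.analyzer = analyzer;
           this.conf = conf;
         }

         public CalcitePlanner getAnalyzer() { return analyzer; }
         public HiveConf getConf() { return conf; }
       }
       // In SessionState: private final Map<String, PreparePlanEntry> preparePlans = new HashMap<>();
       ```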

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/ddl/table/drop/ExecuteStatementAnalyzer.java
##########
@@ -0,0 +1,377 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.table.drop;
+
+import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLSemanticAnalyzerFactory.DDLType;
+import org.apache.hadoop.hive.ql.exec.ExplainTask;
+import org.apache.hadoop.hive.ql.exec.FetchTask;
+import org.apache.hadoop.hive.ql.exec.FilterOperator;
+import org.apache.hadoop.hive.ql.exec.Operator;
+import org.apache.hadoop.hive.ql.exec.OperatorUtils;
+import org.apache.hadoop.hive.ql.exec.SelectOperator;
+import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
+import org.apache.hadoop.hive.ql.exec.Task;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.tez.TezTask;
+import org.apache.hadoop.hive.ql.exec.vector.VectorSelectOperator;
+import org.apache.hadoop.hive.ql.parse.ASTNode;
+import org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer;
+import org.apache.hadoop.hive.ql.parse.HiveParser;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.parse.type.ExprNodeDescExprFactory;
+import org.apache.hadoop.hive.ql.plan.BaseWork;
+import org.apache.hadoop.hive.ql.plan.ExprDynamicParamDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * Analyzer for Execute statement.
+ * This analyzer
+ *  retrieves the cached {@link BaseSemanticAnalyzer},
+ *  makes a copy of all tasks by serializing/deserializing them, and
+ *  binds dynamic parameters inside the cached {@link BaseSemanticAnalyzer} using the provided values.
+ */
+@DDLType(types = HiveParser.TOK_EXECUTE)
+public class ExecuteStatementAnalyzer extends BaseSemanticAnalyzer {
+
+  public ExecuteStatementAnalyzer(QueryState queryState) throws SemanticException {
+    super(queryState);
+  }
+
+  /**
+   * This class encapsulates all {@link Task}s required to be copied.
+   * This is needed because the {@link FetchTask} and the list of {@link Task}s may hold references
+   * to the same objects (e.g. the list of result files) and must be serialized/deserialized together.
+   */
+  private class PlanCopy {
+    FetchTask fetchTask;
+    List<Task<?>> tasks;
+
+    PlanCopy(FetchTask fetchTask, List<Task<?>> tasks) {
+      this.fetchTask = fetchTask;
+      this.tasks = tasks;
+    }
+
+    FetchTask getFetchTask() {
+      return fetchTask;
+    }
+
+    List<Task<?>> getTasks()  {
+      return tasks;
+    }
+  }
+
+  private String getQueryName(ASTNode root) {
+    ASTNode queryNameAST = (ASTNode)(root.getChild(1));
+    return queryNameAST.getText();
+  }
+
+  /**
+   * Utility method to create a copy of the provided object using Kryo serialization/deserialization.
+   */
+  private <T> T makeCopy(final Object task, Class<T> objClass) {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    SerializationUtilities.serializePlan(task, baos);
+
+    return SerializationUtilities.deserializePlan(
+        new ByteArrayInputStream(baos.toByteArray()), objClass);
+  }
+
+  /**
+   * Given a cached {@link BaseSemanticAnalyzer}, this method makes copies of all tasks
+   * (including the {@link FetchTask}) and updates the existing {@link ExecuteStatementAnalyzer}.
+   */
+  private void createTaskCopy(final BaseSemanticAnalyzer cachedPlan) {
+    PlanCopy planCopy = new PlanCopy(cachedPlan.getFetchTask(), cachedPlan.getAllRootTasks());
+    planCopy = makeCopy(planCopy, planCopy.getClass());
+    this.setFetchTask(planCopy.getFetchTask());
+    this.rootTasks = planCopy.getTasks();
+  }
+
+  private String getParamLiteralValue(Map<Integer, ASTNode> paramMap, int paramIndex) {
+    assert(paramMap.containsKey(paramIndex));
+    ASTNode node = paramMap.get(paramIndex);
+
+    if (node.getType() == HiveParser.StringLiteral) {
+      // remove quotes
+      return BaseSemanticAnalyzer.unescapeSQLString(node.getText());
+
+    } else {
+      return node.getText();
+    }
+  }
+
+  /**
+   * This method creates a constant expression to replace the given dynamic expression.
+   * @param dynamicExpr Expression node representing a dynamic parameter
+   * @param typeInfo Type info used to create constant expression from ASTNode
+   * @param parameterMap Integer to AST node map
+   */
+  private ExprNodeConstantDesc getConstant(ExprDynamicParamDesc dynamicExpr, TypeInfo typeInfo,
+      Map<Integer, ASTNode> parameterMap) {
+    assert(parameterMap.containsKey(dynamicExpr.getIndex()));
+
+    String value = getParamLiteralValue(parameterMap, dynamicExpr.getIndex());
+
+    ExprNodeDescExprFactory factory = new ExprNodeDescExprFactory();
+
+    if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
+      return factory.createBooleanConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
+      return factory.createIntConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
+      return factory.createBigintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.charTypeInfo)
+        // CHAR and VARCHAR typeinfo could differ due to different length, therefore an extra
+        // check is used (based on instanceof) to determine if it is char/varchar types
+        || typeInfo instanceof CharTypeInfo) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.varcharTypeInfo)
+        || typeInfo instanceof VarcharTypeInfo) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
+      return factory.createFloatConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
+      return factory.createDoubleConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.byteTypeInfo)) {
+      return factory.createTinyintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.shortTypeInfo)) {
+      return factory.createSmallintConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.dateTypeInfo)) {
+      return factory.createDateConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
+      return factory.createTimestampConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intervalYearMonthTypeInfo)) {
+      return factory.createIntervalYearMonthConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.intervalDayTimeTypeInfo)) {
+      return factory.createIntervalDayTimeConstantExpr(value);
+    } else if (typeInfo.equals(TypeInfoFactory.binaryTypeInfo)) {
+      //TODO: is it okay to create string
+      return factory.createStringConstantExpr(value);
+    }
+    // we will let constant expression itself infer the type
+    return new ExprNodeConstantDesc(parameterMap.get(dynamicExpr.getIndex()));
+  }
+
+  /**
+   * Given a list of expressions, this method traverses each expression tree and replaces
+   * all {@link ExprDynamicParamDesc} nodes with constant expressions.
+   * @param exprList
+   * @param paramMap
+   */
+  private List<ExprNodeDesc> replaceDynamicParamsInExprList(List<ExprNodeDesc> exprList,
+      Map<Integer, ASTNode> paramMap) {
+    List<ExprNodeDesc> updatedExprList = new ArrayList<>();
+    for (ExprNodeDesc expr:exprList) {
+      expr = replaceDynamicParamsWithConstant(expr, expr.getTypeInfo(), paramMap);
+      updatedExprList.add(expr);
+    }
+    return updatedExprList;
+  }
+
+  /**
+   * Given an expression tree rooted at expr and the type info of the expression, this method
+   * traverses the tree and replaces all dynamic expressions with constant expressions.
+   * This method also does type inference for the new constant expressions.
+   * Note about type inference:
+   * since a dynamic parameter lacks a type, we need to figure out an appropriate type to create
+   * a constant out of the string value. To do this, we choose the type of the first child of the
+   * parent expression which isn't a dynamic parameter.
+   */
+  private ExprNodeDesc replaceDynamicParamsWithConstant(ExprNodeDesc expr, TypeInfo typeInfo,
+      Map<Integer, ASTNode> paramMap) {
+    if (expr.getChildren() == null || expr.getChildren().isEmpty()) {
+      if (expr instanceof ExprDynamicParamDesc) {
+        return getConstant((ExprDynamicParamDesc)expr, typeInfo, paramMap);
+      }
+      return expr;
+    }
+
+    for(ExprNodeDesc child:expr.getChildren()) {
+      // we need typeinfo
+      if(child instanceof ExprDynamicParamDesc) {
+        continue;
+      } else if( child.getTypeInfo() != TypeInfoFactory.voidTypeInfo
+          && !child.getTypeInfo().getTypeName().equals(
+          TypeInfoFactory.voidTypeInfo.getTypeName())){
+        typeInfo = child.getTypeInfo();
+        break;
+      }
+    }
+    assert(typeInfo != null);
+
+    List<ExprNodeDesc> exprList = new ArrayList<>();
+    for(ExprNodeDesc child: expr.getChildren()) {
+      if(child instanceof ExprDynamicParamDesc) {

Review comment:
       Could this possibly be simplified to `child = replaceDynamicParamsWithConstant(child, typeInfo, paramMap);` instead of having two code paths in the loop?
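
       For reference, the loop with a single code path; this should be behavior-preserving because the recursive call already turns leaf `ExprDynamicParamDesc` nodes into constants:

       ```java
       List<ExprNodeDesc> exprList = new ArrayList<>();
       for (ExprNodeDesc child : expr.getChildren()) {
         // Recurses into composite children and directly binds dynamic-parameter leaves.
         exprList.add(replaceDynamicParamsWithConstant(child, typeInfo, paramMap));
       }
       ```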

##########
File path: ql/src/java/org/apache/hadoop/hive/ql/plan/ExprDynamicParamDesc.java
##########
@@ -0,0 +1,113 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.plan;
+
+import java.io.Serializable;
+import java.util.List;
+
+import org.apache.commons.lang3.builder.HashCodeBuilder;
+import org.apache.hadoop.hive.common.StringInternUtils;
+import org.apache.hadoop.hive.serde.serdeConstants;
+import org.apache.hadoop.hive.serde2.objectinspector.ConstantObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
+import org.apache.hadoop.hive.serde2.typeinfo.BaseCharTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
+
+/**
+ * A dynamic parameter expression.
+ */
+public class ExprDynamicParamDesc extends ExprNodeDesc implements Serializable {
+  private static final long serialVersionUID = 1L;
+  final protected transient static char[] hexArray = "0123456789ABCDEF".toCharArray();
+
+  private int index;
+  private Object value;
+
+  public ExprDynamicParamDesc() {
+  }
+
+  public ExprDynamicParamDesc(TypeInfo typeInfo, int index, Object value) {
+    super(typeInfo);
+    this.index =  index;
+    this.value = value;
+  }
+
+  public Object getValue() {
+    return value;
+  }
+
+  public int getIndex() {
+    return index;
+  }
+
+
+  @Override
+  public String toString() {
+    return "Dynamic Parameter " + " index: " + index;

Review comment:
       I was thinking we could use something more compact for the string representation in the plan -> `":" + index`. What do you think?
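
       That is, a sketch of the suggested change:

       ```java
       @Override
       public String toString() {
         // Compact plan representation, e.g. ":1" for the first dynamic parameter.
         return ":" + index;
       }
       ```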

##########
File path: ql/src/test/results/clientpositive/llap/prepare_plan.q.out
##########
@@ -0,0 +1,1575 @@
+PREHOOK: query: explain extended prepare pcount from select count(*) from src where key > ?
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: explain extended prepare pcount from select count(*) from src where key > ?
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+OPTIMIZED SQL: SELECT COUNT(*) AS `$f0`
+FROM `default`.`src`
+WHERE `key` > CAST(? AS STRING)
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  filterExpr: (key > CAST( Dynamic Parameter  index: 1 AS STRING)) (type: boolean)
+                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  GatherStats: false
+                  Filter Operator
+                    isSamplingPred: false
+                    predicate: (key > CAST( Dynamic Parameter  index: 1 AS STRING)) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.99
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          bucketingVersion: 2
+                          null sort order: 
+                          numBuckets: -1
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          tag: -1
+                          value expressions: _col0 (type: bigint)
+                          auto parallelism: false
+            Execution mode: llap
+            LLAP IO: no inputs
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: src
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    bucket_count -1
+                    bucketing_version 2
+                    column.name.delimiter ,
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.src
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucketing_version 2
+                      column.name.delimiter ,
+                      columns key,value
+                      columns.comments 'default','default'
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.src
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.src
+                  name: default.src
+            Truncated Path -> Alias:
+              /src [src]
+        Reducer 2 
+            Execution mode: llap
+            Needs Tagging: false
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  bucketingVersion: 2
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      properties:
+                        bucketing_version -1
+                        columns _col0
+                        columns.types bigint
+                        escape.delim \
+                        hive.serialization.extend.additional.nesting.levels true
+                        serialization.escape.crlf true
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pcount from select count(*) from src where key > ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pcount from select count(*) from src where key > ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+378
+PREHOOK: query: explain extended prepare p1 from select * from src where key > ? order by key limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: explain extended prepare p1 from select * from src where key > ? order by key limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+OPTIMIZED SQL: SELECT `key`, `value`
+FROM `default`.`src`
+WHERE `key` > CAST(? AS STRING)
+ORDER BY `key`
+LIMIT 10
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  filterExpr: (key > CAST( Dynamic Parameter  index: 1 AS STRING)) (type: boolean)
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  GatherStats: false
+                  Filter Operator
+                    isSamplingPred: false
+                    predicate: (key > CAST( Dynamic Parameter  index: 1 AS STRING)) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                    Top N Key Operator
+                      sort order: +
+                      keys: key (type: string)
+                      null sort order: z
+                      Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                      top n: 10
+                      Select Operator
+                        expressions: key (type: string), value (type: string)
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          bucketingVersion: 2
+                          key expressions: _col0 (type: string)
+                          null sort order: z
+                          numBuckets: -1
+                          sort order: +
+                          Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                          tag: -1
+                          value expressions: _col1 (type: string)
+                          auto parallelism: false
+            Execution mode: llap
+            LLAP IO: no inputs
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: src
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    bucket_count -1
+                    bucketing_version 2
+                    column.name.delimiter ,
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.src
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucketing_version 2
+                      column.name.delimiter ,
+                      columns key,value
+                      columns.comments 'default','default'
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.src
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.src
+                  name: default.src
+            Truncated Path -> Alias:
+              /src [src]
+        Reducer 2 
+            Execution mode: llap
+            Needs Tagging: false
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    bucketingVersion: 2
+                    compressed: false
+                    GlobalTableId: 0
+#### A masked pattern was here ####
+                    NumFilesPerFileSink: 1
+                    Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+#### A masked pattern was here ####
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        properties:
+                          bucketing_version -1
+                          columns _col0,_col1
+                          columns.types string:string
+                          escape.delim \
+                          hive.serialization.extend.additional.nesting.levels true
+                          serialization.escape.crlf true
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    TotalFiles: 1
+                    GatherStats: false
+                    MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare p1 from select * from src where key > ? order by key limit 10
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: prepare p1 from select * from src where key > ? order by key limit 10
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+201	val_201
+202	val_202
+203	val_203
+203	val_203
+205	val_205
+205	val_205
+207	val_207
+207	val_207
+208	val_208
+208	val_208
+PREHOOK: query: prepare p1 from select * from src where key > ? order by key limit 10
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: prepare p1 from select * from src where key > ? order by key limit 10
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+10	val_10
+100	val_100
+100	val_100
+103	val_103
+103	val_103
+104	val_104
+104	val_104
+105	val_105
+11	val_11
+111	val_111
+PREHOOK: query: explain
+    prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain
+    prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: ((cint <= Dynamic Parameter  index: 1) and (cbigint <= Dynamic Parameter  index: 2) and (cfloat <> Dynamic Parameter  index: 3)) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 183480 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((cint <= Dynamic Parameter  index: 1) and (cbigint <= Dynamic Parameter  index: 2) and (cfloat <> Dynamic Parameter  index: 3)) (type: boolean)
+                    Statistics: Num rows: 1365 Data size: 20400 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: ctinyint (type: tinyint)
+                      outputColumnNames: ctinyint
+                      Statistics: Num rows: 1365 Data size: 20400 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: avg(ctinyint)
+                        keys: ctinyint (type: tinyint)
+                        minReductionHashAggr: 0.9040293
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 131 Data size: 10352 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: tinyint)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: tinyint)
+                          Statistics: Num rows: 131 Data size: 10352 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: struct<count:bigint,sum:double,input:tinyint>)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: avg(VALUE._col0)
+                keys: KEY._col0 (type: tinyint)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 131 Data size: 1444 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: double)
+                  outputColumnNames: _col1
+                  Statistics: Num rows: 131 Data size: 1048 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (_col1 < Dynamic Parameter  index: 4) (type: boolean)
+                    Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col1 (type: double)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: ((cint <= 100) and (cbigint <= 5000000L) and (cfloat <> 0.023)) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 183480 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((cint <= 100) and (cbigint <= 5000000L) and (cfloat <> 0.023)) (type: boolean)
+                    Statistics: Num rows: 1365 Data size: 20400 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: ctinyint (type: tinyint)
+                      outputColumnNames: ctinyint
+                      Statistics: Num rows: 1365 Data size: 20400 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: avg(ctinyint)
+                        keys: ctinyint (type: tinyint)
+                        minReductionHashAggr: 0.9040293
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 131 Data size: 10352 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: tinyint)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: tinyint)
+                          Statistics: Num rows: 131 Data size: 10352 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: struct<count:bigint,sum:double,input:tinyint>)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: avg(VALUE._col0)
+                keys: KEY._col0 (type: tinyint)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 131 Data size: 1444 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: double)
+                  outputColumnNames: _col1
+                  Statistics: Num rows: 131 Data size: 1048 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (_col1 < 0.0D) (type: boolean)
+                    Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col1 (type: double)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+-51.0
+PREHOOK: query: explain
+    prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint = ?  and csmallint != ? group by cint
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain
+    prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint = ?  and csmallint != ? group by cint
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: ((ctinyint = UDFToByte(Dynamic Parameter  index: 1)) and (csmallint <> UDFToShort(Dynamic Parameter  index: 2))) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 110092 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((ctinyint = UDFToByte(Dynamic Parameter  index: 1)) and (csmallint <> UDFToShort(Dynamic Parameter  index: 2))) (type: boolean)
+                    Statistics: Num rows: 6144 Data size: 55048 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: cint (type: int)
+                      outputColumnNames: cint
+                      Statistics: Num rows: 6144 Data size: 55048 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        keys: cint (type: int)
+                        minReductionHashAggr: 0.49983722
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 3072 Data size: 33752 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 3072 Data size: 33752 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 3072 Data size: 33752 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: bigint)
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 3072 Data size: 24576 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 3072 Data size: 24576 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint <= ?  and csmallint != ? group by cint
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint <= ?  and csmallint != ? group by cint
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: ((ctinyint <= UDFToByte(3Y)) and (csmallint <> UDFToShort(10S))) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 110092 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((ctinyint <= UDFToByte(3Y)) and (csmallint <> UDFToShort(10S))) (type: boolean)
+                    Statistics: Num rows: 4096 Data size: 36708 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: cint (type: int)
+                      outputColumnNames: cint
+                      Statistics: Num rows: 4096 Data size: 36708 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        keys: cint (type: int)
+                        minReductionHashAggr: 0.49975586
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 2048 Data size: 22504 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 2048 Data size: 22504 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 2048 Data size: 22504 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: bigint)
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 2048 Data size: 16384 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 2048 Data size: 16384 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint <= ?  and csmallint != ? group by cint
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint <= ?  and csmallint != ? group by cint
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+553
+547
+523
+1665
+PREHOOK: query: create table tcharvchar(c char(10), v varchar(50)) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tcharvchar
+POSTHOOK: query: create table tcharvchar(c char(10), v varchar(50)) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tcharvchar
+PREHOOK: query: insert into tcharvchar values ('c1', 'v10'), ('c2', 'v100')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tcharvchar
+POSTHOOK: query: insert into tcharvchar values ('c1', 'v10'), ('c2', 'v100')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tcharvchar
+POSTHOOK: Lineage: tcharvchar.c SCRIPT []
+POSTHOOK: Lineage: tcharvchar.v SCRIPT []
+PREHOOK: query: explain prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+POSTHOOK: query: explain prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: tcharvchar
+                  filterExpr: ((c = CAST( Dynamic Parameter  index: 1 AS CHAR(10))) and (v <> CAST( Dynamic Parameter  index: 2 AS varchar(50)))) (type: boolean)
+                  Statistics: Num rows: 2 Data size: 348 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((c = CAST( Dynamic Parameter  index: 1 AS CHAR(10))) and (v <> CAST( Dynamic Parameter  index: 2 AS varchar(50)))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: tcharvchar
+                  filterExpr: ((c = CAST( 'c1' AS CHAR(10))) and (v <> CAST( 'v1' AS varchar(50)))) (type: boolean)
+                  Statistics: Num rows: 2 Data size: 348 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((c = CAST( 'c1' AS CHAR(10))) and (v <> CAST( 'v1' AS varchar(50)))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+1
+PREHOOK: query: drop table tcharvchar
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tcharvchar
+PREHOOK: Output: default@tcharvchar
+POSTHOOK: query: drop table tcharvchar
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tcharvchar
+POSTHOOK: Output: default@tcharvchar
+PREHOOK: query: create table tdatets(t timestamp, d date, dc decimal(10,2)) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tdatets
+POSTHOOK: query: create table tdatets(t timestamp, d date, dc decimal(10,2)) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tdatets
+PREHOOK: query: insert into tdatets values ( cast('2011-01-01 00:00:00' as timestamp), cast('1919-11-01' as date), 5.00)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tdatets
+POSTHOOK: query: insert into tdatets values ( cast('2011-01-01 00:00:00' as timestamp), cast('1919-11-01' as date), 5.00)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tdatets
+POSTHOOK: Lineage: tdatets.d SCRIPT []
+POSTHOOK: Lineage: tdatets.dc SCRIPT []
+POSTHOOK: Lineage: tdatets.t SCRIPT []
+PREHOOK: query: insert into tdatets values ( cast('2010-01-01 04:00:00' as timestamp), cast('1918-11-01' as date), 4.00)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tdatets
+POSTHOOK: query: insert into tdatets values ( cast('2010-01-01 04:00:00' as timestamp), cast('1918-11-01' as date), 4.00)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tdatets
+POSTHOOK: Lineage: tdatets.d SCRIPT []
+POSTHOOK: Lineage: tdatets.dc SCRIPT []
+POSTHOOK: Lineage: tdatets.t SCRIPT []
+PREHOOK: query: explain
+    prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+POSTHOOK: query: explain
+    prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: tdatets
+                  filterExpr: ((t <> CAST( Dynamic Parameter  index: 1 AS TIMESTAMP)) and (d <> CAST( Dynamic Parameter  index: 2 AS DATE)) and (dc > CAST( Dynamic Parameter  index: 3 AS decimal(10,2)))) (type: boolean)
+                  Statistics: Num rows: 2 Data size: 416 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((t <> CAST( Dynamic Parameter  index: 1 AS TIMESTAMP)) and (d <> CAST( Dynamic Parameter  index: 2 AS DATE)) and (dc > CAST( Dynamic Parameter  index: 3 AS decimal(10,2)))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+POSTHOOK: query: prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: tdatets
+                  filterExpr: ((t <> CAST( TIMESTAMP'2012-01-01 00:01:01' AS TIMESTAMP)) and (d <> CAST( DATE'2020-01-01' AS DATE)) and (dc > CAST( 1 AS decimal(10,2)))) (type: boolean)
+                  Statistics: Num rows: 2 Data size: 416 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((t <> CAST( TIMESTAMP'2012-01-01 00:01:01' AS TIMESTAMP)) and (d <> CAST( DATE'2020-01-01' AS DATE)) and (dc > CAST( 1 AS decimal(10,2)))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+POSTHOOK: query: prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+2
+PREHOOK: query: drop table tdatets
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tdatets
+PREHOOK: Output: default@tdatets
+POSTHOOK: query: drop table tdatets
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tdatets
+POSTHOOK: Output: default@tdatets
+PREHOOK: query: explain prepare p2 from select min(ctinyint), max(cbigint) from alltypesorc where cint > (? + ? + ?) group by ctinyint
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain prepare p2 from select min(ctinyint), max(cbigint) from alltypesorc where cint > (? + ? + ?) group by ctinyint
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: (cint > UDFToInteger(((Dynamic Parameter  index: 1 + Dynamic Parameter  index: 2) + Dynamic Parameter  index: 3))) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 146784 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (cint > UDFToInteger(((Dynamic Parameter  index: 1 + Dynamic Parameter  index: 2) + Dynamic Parameter  index: 3))) (type: boolean)
+                    Statistics: Num rows: 4096 Data size: 48944 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: ctinyint (type: tinyint), cbigint (type: bigint)
+                      outputColumnNames: ctinyint, cbigint
+                      Statistics: Num rows: 4096 Data size: 48944 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: min(ctinyint), max(cbigint)
+                        keys: ctinyint (type: tinyint)
+                        minReductionHashAggr: 0.9680176
+                        mode: hash
+                        outputColumnNames: _col0, _col1, _col2
+                        Statistics: Num rows: 131 Data size: 1968 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: tinyint)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: tinyint)
+                          Statistics: Num rows: 131 Data size: 1968 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: tinyint), _col2 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: min(VALUE._col0), max(VALUE._col1)
+                keys: KEY._col0 (type: tinyint)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 131 Data size: 1968 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: tinyint), _col2 (type: bigint)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 131 Data size: 1572 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 131 Data size: 1572 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare p2 from select min(ctinyint), max(cbigint) from alltypesorc where cint > (? + ? + ?) group by ctinyint
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare p2 from select min(ctinyint), max(cbigint) from alltypesorc where cint > (? + ? + ?) group by ctinyint
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+-64	NULL
+-62	NULL
+-58	NULL
+-57	NULL
+-54	NULL
+-50	NULL
+-44	NULL
+-42	NULL
+-39	NULL
+-38	NULL
+-37	NULL
+-36	NULL
+-35	NULL
+-31	NULL
+-29	NULL
+-27	NULL
+-26	NULL
+-24	NULL
+-23	NULL
+-22	NULL
+-18	NULL
+-17	NULL
+-16	NULL
+-14	NULL
+-12	NULL
+-11	NULL
+-10	NULL
+-9	NULL
+-8	NULL
+-6	NULL
+-5	NULL
+-4	NULL
+-3	NULL
+-2	NULL
+0	NULL
+1	NULL
+2	NULL
+5	NULL
+7	NULL
+8	2141237419
+11	2145498388
+13	NULL
+17	NULL
+19	NULL
+21	NULL
+22	NULL
+25	NULL
+26	NULL
+28	NULL
+29	NULL
+32	NULL
+33	NULL
+36	NULL
+43	NULL
+44	NULL
+45	NULL
+47	NULL
+50	NULL
+51	NULL
+53	NULL
+54	NULL
+55	NULL
+56	NULL
+57	NULL
+58	NULL
+59	NULL
+60	NULL
+61	NULL
+-63	NULL
+-61	NULL
+-60	NULL
+-59	NULL
+-56	NULL
+-55	NULL
+-53	NULL
+-52	NULL
+-51	2140002067
+-49	NULL
+-48	NULL
+-47	NULL
+-46	NULL
+-45	NULL
+-43	NULL
+-41	NULL
+-40	NULL
+-34	NULL
+-33	NULL
+-32	NULL
+-30	NULL
+-28	NULL
+-25	NULL
+-21	NULL
+-20	NULL
+-19	NULL
+-15	NULL
+-13	NULL
+-7	NULL
+-1	NULL
+3	NULL
+4	NULL
+6	NULL
+9	NULL
+10	NULL
+12	NULL
+14	NULL
+15	NULL
+16	NULL
+18	NULL
+20	NULL
+23	NULL
+24	NULL
+27	NULL
+30	NULL
+31	NULL
+34	NULL
+35	NULL
+37	NULL
+38	NULL
+39	NULL
+40	NULL
+41	NULL
+42	NULL
+46	NULL
+48	NULL
+49	NULL
+52	NULL
+62	NULL
+NULL	1864027286
+PREHOOK: query: explain prepare pconcat
+    from select count(*) from src where key > concat(?, ?)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: explain prepare pconcat
+    from select count(*) from src where key > concat(?, ?)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  filterExpr: (key > concat(CAST( Dynamic Parameter  index: 1 AS STRING), Dynamic Parameter  index: 2)) (type: boolean)
+                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (key > concat(CAST( Dynamic Parameter  index: 1 AS STRING), Dynamic Parameter  index: 2)) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.99
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pconcat
+    from select count(*) from src where key > concat(?, ?)
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pconcat
+    from select count(*) from src where key > concat(?, ?)
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  filterExpr: (key > concat(CAST( '1' AS STRING), '20')) (type: boolean)
+                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (key > concat(CAST( '1' AS STRING), '20')) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.99
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pconcat
+    from select count(*) from src where key > concat(?, ?)
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pconcat
+    from select count(*) from src where key > concat(?, ?)
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+474
+PREHOOK: query: create table daysales (customer int) partitioned by (dt string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@daysales
+POSTHOOK: query: create table daysales (customer int) partitioned by (dt string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@daysales
+PREHOOK: query: insert into daysales partition(dt='2001-01-01') values(1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@daysales@dt=2001-01-01
+POSTHOOK: query: insert into daysales partition(dt='2001-01-01') values(1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@daysales@dt=2001-01-01
+POSTHOOK: Lineage: daysales PARTITION(dt=2001-01-01).customer SCRIPT []
+PREHOOK: query: insert into daysales partition(dt='2001-01-03') values(1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@daysales@dt=2001-01-03
+POSTHOOK: query: insert into daysales partition(dt='2001-01-03') values(1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@daysales@dt=2001-01-03
+POSTHOOK: Lineage: daysales PARTITION(dt=2001-01-03).customer SCRIPT []
+PREHOOK: query: insert into daysales partition(dt='2001-01-03') values(1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@daysales@dt=2001-01-03
+POSTHOOK: query: insert into daysales partition(dt='2001-01-03') values(1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@daysales@dt=2001-01-03
+POSTHOOK: Lineage: daysales PARTITION(dt=2001-01-03).customer SCRIPT []
+PREHOOK: query: explain prepare pPart1 from select count(*) from daysales where dt=? and customer=?
+PREHOOK: type: QUERY
+PREHOOK: Input: default@daysales
+PREHOOK: Input: default@daysales@dt=2001-01-01
+PREHOOK: Input: default@daysales@dt=2001-01-03
+#### A masked pattern was here ####
+POSTHOOK: query: explain prepare pPart1 from select count(*) from daysales where dt=? and customer=?
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@daysales
+POSTHOOK: Input: default@daysales@dt=2001-01-01
+POSTHOOK: Input: default@daysales@dt=2001-01-03
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: daysales
+                  filterExpr: ((dt = CAST( Dynamic Parameter  index: 1 AS STRING)) and (customer = UDFToInteger(Dynamic Parameter  index: 2))) (type: boolean)
+                  Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((dt = CAST( Dynamic Parameter  index: 1 AS STRING)) and (customer = UDFToInteger(Dynamic Parameter  index: 2))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pPart1 from select count(*) from daysales where dt=? and customer=?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@daysales
+PREHOOK: Input: default@daysales@dt=2001-01-01
+PREHOOK: Input: default@daysales@dt=2001-01-03
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pPart1 from select count(*) from daysales where dt=? and customer=?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@daysales
+POSTHOOK: Input: default@daysales@dt=2001-01-01
+POSTHOOK: Input: default@daysales@dt=2001-01-03
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: daysales
+                  filterExpr: ((dt = CAST( '2001-01-01' AS STRING)) and (customer = UDFToInteger(1))) (type: boolean)
+                  Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((dt = CAST( '2001-01-01' AS STRING)) and (customer = UDFToInteger(1))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pPart1 from select count(*) from daysales where dt=? and customer=?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@daysales
+PREHOOK: Input: default@daysales@dt=2001-01-01

Review comment:
       From the PREHOOK entries, it seems partition pruning is not being done: this query has a filter on the partition column, yet both partitions still show up as inputs in the hook. Is this remaining work? Should we create a follow-up? A quick way to check is sketched below.
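
       As a minimal sketch (reusing the daysales table from this test; the literal-predicate query is mine, not part of the patch), I would expect the constant version to list a single partition in PREHOOK, which is the behavior the parameterized query would ideally match once the parameter value is bound:

           -- Literal predicate: only default@daysales@dt=2001-01-01 should appear
           -- as a PREHOOK input if partition pruning kicks in.
           explain select count(*) from daysales where dt = '2001-01-01' and customer = 1;

           -- Parameterized version from the patch: both partitions currently appear
           -- in the hooks, suggesting pruning does not happen for the dynamic parameter.
           explain prepare pPart1 from select count(*) from daysales where dt = ? and customer = ?;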

##########
File path: ql/src/test/results/clientpositive/llap/prepare_plan.q.out
##########
@@ -0,0 +1,1575 @@
+PREHOOK: query: explain extended prepare pcount from select count(*) from src where key > ?
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: explain extended prepare pcount from select count(*) from src where key > ?
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+OPTIMIZED SQL: SELECT COUNT(*) AS `$f0`
+FROM `default`.`src`
+WHERE `key` > CAST(? AS STRING)
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  filterExpr: (key > CAST( Dynamic Parameter  index: 1 AS STRING)) (type: boolean)
+                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  GatherStats: false
+                  Filter Operator
+                    isSamplingPred: false
+                    predicate: (key > CAST( Dynamic Parameter  index: 1 AS STRING)) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.99
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          bucketingVersion: 2
+                          null sort order: 
+                          numBuckets: -1
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          tag: -1
+                          value expressions: _col0 (type: bigint)
+                          auto parallelism: false
+            Execution mode: llap
+            LLAP IO: no inputs
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: src
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    bucket_count -1
+                    bucketing_version 2
+                    column.name.delimiter ,
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.src
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucketing_version 2
+                      column.name.delimiter ,
+                      columns key,value
+                      columns.comments 'default','default'
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.src
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.src
+                  name: default.src
+            Truncated Path -> Alias:
+              /src [src]
+        Reducer 2 
+            Execution mode: llap
+            Needs Tagging: false
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  bucketingVersion: 2
+                  compressed: false
+                  GlobalTableId: 0
+#### A masked pattern was here ####
+                  NumFilesPerFileSink: 1
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+#### A masked pattern was here ####
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      properties:
+                        bucketing_version -1
+                        columns _col0
+                        columns.types bigint
+                        escape.delim \
+                        hive.serialization.extend.additional.nesting.levels true
+                        serialization.escape.crlf true
+                        serialization.format 1
+                        serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  TotalFiles: 1
+                  GatherStats: false
+                  MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pcount from select count(*) from src where key > ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pcount from select count(*) from src where key > ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+378
+PREHOOK: query: explain extended prepare p1 from select * from src where key > ? order by key limit 10
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: explain extended prepare p1 from select * from src where key > ? order by key limit 10
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+OPTIMIZED SQL: SELECT `key`, `value`
+FROM `default`.`src`
+WHERE `key` > CAST(? AS STRING)
+ORDER BY `key`
+LIMIT 10
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  filterExpr: (key > CAST( Dynamic Parameter  index: 1 AS STRING)) (type: boolean)
+                  Statistics: Num rows: 500 Data size: 89000 Basic stats: COMPLETE Column stats: COMPLETE
+                  GatherStats: false
+                  Filter Operator
+                    isSamplingPred: false
+                    predicate: (key > CAST( Dynamic Parameter  index: 1 AS STRING)) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                    Top N Key Operator
+                      sort order: +
+                      keys: key (type: string)
+                      null sort order: z
+                      Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                      top n: 10
+                      Select Operator
+                        expressions: key (type: string), value (type: string)
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          bucketingVersion: 2
+                          key expressions: _col0 (type: string)
+                          null sort order: z
+                          numBuckets: -1
+                          sort order: +
+                          Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                          tag: -1
+                          value expressions: _col1 (type: string)
+                          auto parallelism: false
+            Execution mode: llap
+            LLAP IO: no inputs
+            Path -> Alias:
+#### A masked pattern was here ####
+            Path -> Partition:
+#### A masked pattern was here ####
+                Partition
+                  base file name: src
+                  input format: org.apache.hadoop.mapred.TextInputFormat
+                  output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                  properties:
+                    bucket_count -1
+                    bucketing_version 2
+                    column.name.delimiter ,
+                    columns key,value
+                    columns.types string:string
+#### A masked pattern was here ####
+                    name default.src
+                    serialization.format 1
+                    serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                  serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                
+                    input format: org.apache.hadoop.mapred.TextInputFormat
+                    output format: org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat
+                    properties:
+                      bucketing_version 2
+                      column.name.delimiter ,
+                      columns key,value
+                      columns.comments 'default','default'
+                      columns.types string:string
+#### A masked pattern was here ####
+                      name default.src
+                      serialization.format 1
+                      serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    name: default.src
+                  name: default.src
+            Truncated Path -> Alias:
+              /src [src]
+        Reducer 2 
+            Execution mode: llap
+            Needs Tagging: false
+            Reduce Operator Tree:
+              Select Operator
+                expressions: KEY.reducesinkkey0 (type: string), VALUE._col0 (type: string)
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 166 Data size: 29548 Basic stats: COMPLETE Column stats: COMPLETE
+                Limit
+                  Number of rows: 10
+                  Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    bucketingVersion: 2
+                    compressed: false
+                    GlobalTableId: 0
+#### A masked pattern was here ####
+                    NumFilesPerFileSink: 1
+                    Statistics: Num rows: 10 Data size: 1780 Basic stats: COMPLETE Column stats: COMPLETE
+#### A masked pattern was here ####
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        properties:
+                          bucketing_version -1
+                          columns _col0,_col1
+                          columns.types string:string
+                          escape.delim \
+                          hive.serialization.extend.additional.nesting.levels true
+                          serialization.escape.crlf true
+                          serialization.format 1
+                          serialization.lib org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+                    TotalFiles: 1
+                    GatherStats: false
+                    MultiFileSpray: false
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: 10
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare p1 from select * from src where key > ? order by key limit 10
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: prepare p1 from select * from src where key > ? order by key limit 10
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+201	val_201
+202	val_202
+203	val_203
+203	val_203
+205	val_205
+205	val_205
+207	val_207
+207	val_207
+208	val_208
+208	val_208
+PREHOOK: query: prepare p1 from select * from src where key > ? order by key limit 10
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: prepare p1 from select * from src where key > ? order by key limit 10
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+10	val_10
+100	val_100
+100	val_100
+103	val_103
+103	val_103
+104	val_104
+104	val_104
+105	val_105
+11	val_11
+111	val_111
+PREHOOK: query: explain
+    prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain
+    prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: ((cint <= Dynamic Parameter  index: 1) and (cbigint <= Dynamic Parameter  index: 2) and (cfloat <> Dynamic Parameter  index: 3)) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 183480 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((cint <= Dynamic Parameter  index: 1) and (cbigint <= Dynamic Parameter  index: 2) and (cfloat <> Dynamic Parameter  index: 3)) (type: boolean)
+                    Statistics: Num rows: 1365 Data size: 20400 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: ctinyint (type: tinyint)
+                      outputColumnNames: ctinyint
+                      Statistics: Num rows: 1365 Data size: 20400 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: avg(ctinyint)
+                        keys: ctinyint (type: tinyint)
+                        minReductionHashAggr: 0.9040293
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 131 Data size: 10352 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: tinyint)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: tinyint)
+                          Statistics: Num rows: 131 Data size: 10352 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: struct<count:bigint,sum:double,input:tinyint>)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: avg(VALUE._col0)
+                keys: KEY._col0 (type: tinyint)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 131 Data size: 1444 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: double)
+                  outputColumnNames: _col1
+                  Statistics: Num rows: 131 Data size: 1048 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (_col1 < Dynamic Parameter  index: 4) (type: boolean)
+                    Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col1 (type: double)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: ((cint <= 100) and (cbigint <= 5000000L) and (cfloat <> 0.023)) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 183480 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((cint <= 100) and (cbigint <= 5000000L) and (cfloat <> 0.023)) (type: boolean)
+                    Statistics: Num rows: 1365 Data size: 20400 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: ctinyint (type: tinyint)
+                      outputColumnNames: ctinyint
+                      Statistics: Num rows: 1365 Data size: 20400 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: avg(ctinyint)
+                        keys: ctinyint (type: tinyint)
+                        minReductionHashAggr: 0.9040293
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 131 Data size: 10352 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: tinyint)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: tinyint)
+                          Statistics: Num rows: 131 Data size: 10352 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: struct<count:bigint,sum:double,input:tinyint>)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: avg(VALUE._col0)
+                keys: KEY._col0 (type: tinyint)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 131 Data size: 1444 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: double)
+                  outputColumnNames: _col1
+                  Statistics: Num rows: 131 Data size: 1048 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (_col1 < 0.0D) (type: boolean)
+                    Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: _col1 (type: double)
+                      outputColumnNames: _col0
+                      Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                      File Output Operator
+                        compressed: false
+                        Statistics: Num rows: 43 Data size: 344 Basic stats: COMPLETE Column stats: COMPLETE
+                        table:
+                            input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                            output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                            serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pint
+    from select avg(ctinyint) as ag from alltypesorc where cint <= ?  and cbigint <= ? and cfloat != ? group by ctinyint having ag < ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+-51.0
+PREHOOK: query: explain
+    prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint = ?  and csmallint != ? group by cint
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain
+    prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint = ?  and csmallint != ? group by cint
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: ((ctinyint = UDFToByte(Dynamic Parameter  index: 1)) and (csmallint <> UDFToShort(Dynamic Parameter  index: 2))) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 110092 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((ctinyint = UDFToByte(Dynamic Parameter  index: 1)) and (csmallint <> UDFToShort(Dynamic Parameter  index: 2))) (type: boolean)
+                    Statistics: Num rows: 6144 Data size: 55048 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: cint (type: int)
+                      outputColumnNames: cint
+                      Statistics: Num rows: 6144 Data size: 55048 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        keys: cint (type: int)
+                        minReductionHashAggr: 0.49983722
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 3072 Data size: 33752 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 3072 Data size: 33752 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 3072 Data size: 33752 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: bigint)
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 3072 Data size: 24576 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 3072 Data size: 24576 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint <= ?  and csmallint != ? group by cint
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint <= ?  and csmallint != ? group by cint
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: ((ctinyint <= UDFToByte(3Y)) and (csmallint <> UDFToShort(10S))) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 110092 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((ctinyint <= UDFToByte(3Y)) and (csmallint <> UDFToShort(10S))) (type: boolean)
+                    Statistics: Num rows: 4096 Data size: 36708 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: cint (type: int)
+                      outputColumnNames: cint
+                      Statistics: Num rows: 4096 Data size: 36708 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        keys: cint (type: int)
+                        minReductionHashAggr: 0.49975586
+                        mode: hash
+                        outputColumnNames: _col0, _col1
+                        Statistics: Num rows: 2048 Data size: 22504 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: int)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: int)
+                          Statistics: Num rows: 2048 Data size: 22504 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                keys: KEY._col0 (type: int)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1
+                Statistics: Num rows: 2048 Data size: 22504 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: bigint)
+                  outputColumnNames: _col0
+                  Statistics: Num rows: 2048 Data size: 16384 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 2048 Data size: 16384 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint <= ?  and csmallint != ? group by cint
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare psint
+    from select count(*) as ag from alltypesorc where ctinyint <= ?  and csmallint != ? group by cint
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+553
+547
+523
+1665
+PREHOOK: query: create table tcharvchar(c char(10), v varchar(50)) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tcharvchar
+POSTHOOK: query: create table tcharvchar(c char(10), v varchar(50)) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tcharvchar
+PREHOOK: query: insert into tcharvchar values ('c1', 'v10'), ('c2', 'v100')
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tcharvchar
+POSTHOOK: query: insert into tcharvchar values ('c1', 'v10'), ('c2', 'v100')
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tcharvchar
+POSTHOOK: Lineage: tcharvchar.c SCRIPT []
+POSTHOOK: Lineage: tcharvchar.v SCRIPT []
+PREHOOK: query: explain prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+POSTHOOK: query: explain prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: tcharvchar
+                  filterExpr: ((c = CAST( Dynamic Parameter  index: 1 AS CHAR(10))) and (v <> CAST( Dynamic Parameter  index: 2 AS varchar(50)))) (type: boolean)
+                  Statistics: Num rows: 2 Data size: 348 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((c = CAST( Dynamic Parameter  index: 1 AS CHAR(10))) and (v <> CAST( Dynamic Parameter  index: 2 AS varchar(50)))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: tcharvchar
+                  filterExpr: ((c = CAST( 'c1' AS CHAR(10))) and (v <> CAST( 'v1' AS varchar(50)))) (type: boolean)
+                  Statistics: Num rows: 2 Data size: 348 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((c = CAST( 'c1' AS CHAR(10))) and (v <> CAST( 'v1' AS varchar(50)))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 174 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pcharv  from select count(*) from tcharvchar where c = ? and v != ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@tcharvchar
+#### A masked pattern was here ####
+1
+PREHOOK: query: drop table tcharvchar
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tcharvchar
+PREHOOK: Output: default@tcharvchar
+POSTHOOK: query: drop table tcharvchar
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tcharvchar
+POSTHOOK: Output: default@tcharvchar
+PREHOOK: query: create table tdatets(t timestamp, d date, dc decimal(10,2)) stored as orc
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@tdatets
+POSTHOOK: query: create table tdatets(t timestamp, d date, dc decimal(10,2)) stored as orc
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@tdatets
+PREHOOK: query: insert into tdatets values ( cast('2011-01-01 00:00:00' as timestamp), cast('1919-11-01' as date), 5.00)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tdatets
+POSTHOOK: query: insert into tdatets values ( cast('2011-01-01 00:00:00' as timestamp), cast('1919-11-01' as date), 5.00)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tdatets
+POSTHOOK: Lineage: tdatets.d SCRIPT []
+POSTHOOK: Lineage: tdatets.dc SCRIPT []
+POSTHOOK: Lineage: tdatets.t SCRIPT []
+PREHOOK: query: insert into tdatets values ( cast('2010-01-01 04:00:00' as timestamp), cast('1918-11-01' as date), 4.00)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@tdatets
+POSTHOOK: query: insert into tdatets values ( cast('2010-01-01 04:00:00' as timestamp), cast('1918-11-01' as date), 4.00)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@tdatets
+POSTHOOK: Lineage: tdatets.d SCRIPT []
+POSTHOOK: Lineage: tdatets.dc SCRIPT []
+POSTHOOK: Lineage: tdatets.t SCRIPT []
+PREHOOK: query: explain
+    prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+PREHOOK: type: QUERY
+PREHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+POSTHOOK: query: explain
+    prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: tdatets
+                  filterExpr: ((t <> CAST( Dynamic Parameter  index: 1 AS TIMESTAMP)) and (d <> CAST( Dynamic Parameter  index: 2 AS DATE)) and (dc > CAST( Dynamic Parameter  index: 3 AS decimal(10,2)))) (type: boolean)
+                  Statistics: Num rows: 2 Data size: 416 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((t <> CAST( Dynamic Parameter  index: 1 AS TIMESTAMP)) and (d <> CAST( Dynamic Parameter  index: 2 AS DATE)) and (dc > CAST( Dynamic Parameter  index: 3 AS decimal(10,2)))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+POSTHOOK: query: prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: tdatets
+                  filterExpr: ((t <> CAST( TIMESTAMP'2012-01-01 00:01:01' AS TIMESTAMP)) and (d <> CAST( DATE'2020-01-01' AS DATE)) and (dc > CAST( 1 AS decimal(10,2)))) (type: boolean)
+                  Statistics: Num rows: 2 Data size: 416 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((t <> CAST( TIMESTAMP'2012-01-01 00:01:01' AS TIMESTAMP)) and (d <> CAST( DATE'2020-01-01' AS DATE)) and (dc > CAST( 1 AS decimal(10,2)))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 208 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+POSTHOOK: query: prepare ptsd from select count(*) from tdatets where t != ? and d != ? and dc > ?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@tdatets
+#### A masked pattern was here ####
+2
+PREHOOK: query: drop table tdatets
+PREHOOK: type: DROPTABLE
+PREHOOK: Input: default@tdatets
+PREHOOK: Output: default@tdatets
+POSTHOOK: query: drop table tdatets
+POSTHOOK: type: DROPTABLE
+POSTHOOK: Input: default@tdatets
+POSTHOOK: Output: default@tdatets
+PREHOOK: query: explain prepare p2 from select min(ctinyint), max(cbigint) from alltypesorc where cint > (? + ? + ?) group by ctinyint
+PREHOOK: type: QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: explain prepare p2 from select min(ctinyint), max(cbigint) from alltypesorc where cint > (? + ? + ?) group by ctinyint
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: alltypesorc
+                  filterExpr: (cint > UDFToInteger(((Dynamic Parameter  index: 1 + Dynamic Parameter  index: 2) + Dynamic Parameter  index: 3))) (type: boolean)
+                  Statistics: Num rows: 12288 Data size: 146784 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (cint > UDFToInteger(((Dynamic Parameter  index: 1 + Dynamic Parameter  index: 2) + Dynamic Parameter  index: 3))) (type: boolean)
+                    Statistics: Num rows: 4096 Data size: 48944 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      expressions: ctinyint (type: tinyint), cbigint (type: bigint)
+                      outputColumnNames: ctinyint, cbigint
+                      Statistics: Num rows: 4096 Data size: 48944 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: min(ctinyint), max(cbigint)
+                        keys: ctinyint (type: tinyint)
+                        minReductionHashAggr: 0.9680176
+                        mode: hash
+                        outputColumnNames: _col0, _col1, _col2
+                        Statistics: Num rows: 131 Data size: 1968 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          key expressions: _col0 (type: tinyint)
+                          null sort order: z
+                          sort order: +
+                          Map-reduce partition columns: _col0 (type: tinyint)
+                          Statistics: Num rows: 131 Data size: 1968 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col1 (type: tinyint), _col2 (type: bigint)
+            Execution mode: llap
+            LLAP IO: all inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: min(VALUE._col0), max(VALUE._col1)
+                keys: KEY._col0 (type: tinyint)
+                mode: mergepartial
+                outputColumnNames: _col0, _col1, _col2
+                Statistics: Num rows: 131 Data size: 1968 Basic stats: COMPLETE Column stats: COMPLETE
+                Select Operator
+                  expressions: _col1 (type: tinyint), _col2 (type: bigint)
+                  outputColumnNames: _col0, _col1
+                  Statistics: Num rows: 131 Data size: 1572 Basic stats: COMPLETE Column stats: COMPLETE
+                  File Output Operator
+                    compressed: false
+                    Statistics: Num rows: 131 Data size: 1572 Basic stats: COMPLETE Column stats: COMPLETE
+                    table:
+                        input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                        output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                        serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare p2 from select min(ctinyint), max(cbigint) from alltypesorc where cint > (? + ? + ?) group by ctinyint
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+POSTHOOK: query: prepare p2 from select min(ctinyint), max(cbigint) from alltypesorc where cint > (? + ? + ?) group by ctinyint
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@alltypesorc
+#### A masked pattern was here ####
+-64	NULL
+-62	NULL
+-58	NULL
+-57	NULL
+-54	NULL
+-50	NULL
+-44	NULL
+-42	NULL
+-39	NULL
+-38	NULL
+-37	NULL
+-36	NULL
+-35	NULL
+-31	NULL
+-29	NULL
+-27	NULL
+-26	NULL
+-24	NULL
+-23	NULL
+-22	NULL
+-18	NULL
+-17	NULL
+-16	NULL
+-14	NULL
+-12	NULL
+-11	NULL
+-10	NULL
+-9	NULL
+-8	NULL
+-6	NULL
+-5	NULL
+-4	NULL
+-3	NULL
+-2	NULL
+0	NULL
+1	NULL
+2	NULL
+5	NULL
+7	NULL
+8	2141237419
+11	2145498388
+13	NULL
+17	NULL
+19	NULL
+21	NULL
+22	NULL
+25	NULL
+26	NULL
+28	NULL
+29	NULL
+32	NULL
+33	NULL
+36	NULL
+43	NULL
+44	NULL
+45	NULL
+47	NULL
+50	NULL
+51	NULL
+53	NULL
+54	NULL
+55	NULL
+56	NULL
+57	NULL
+58	NULL
+59	NULL
+60	NULL
+61	NULL
+-63	NULL
+-61	NULL
+-60	NULL
+-59	NULL
+-56	NULL
+-55	NULL
+-53	NULL
+-52	NULL
+-51	2140002067
+-49	NULL
+-48	NULL
+-47	NULL
+-46	NULL
+-45	NULL
+-43	NULL
+-41	NULL
+-40	NULL
+-34	NULL
+-33	NULL
+-32	NULL
+-30	NULL
+-28	NULL
+-25	NULL
+-21	NULL
+-20	NULL
+-19	NULL
+-15	NULL
+-13	NULL
+-7	NULL
+-1	NULL
+3	NULL
+4	NULL
+6	NULL
+9	NULL
+10	NULL
+12	NULL
+14	NULL
+15	NULL
+16	NULL
+18	NULL
+20	NULL
+23	NULL
+24	NULL
+27	NULL
+30	NULL
+31	NULL
+34	NULL
+35	NULL
+37	NULL
+38	NULL
+39	NULL
+40	NULL
+41	NULL
+42	NULL
+46	NULL
+48	NULL
+49	NULL
+52	NULL
+62	NULL
+NULL	1864027286
+PREHOOK: query: explain prepare pconcat
+    from select count(*) from src where key > concat(?, ?)
+PREHOOK: type: QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: explain prepare pconcat
+    from select count(*) from src where key > concat(?, ?)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  filterExpr: (key > concat(CAST( Dynamic Parameter  index: 1 AS STRING), Dynamic Parameter  index: 2)) (type: boolean)
+                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (key > concat(CAST( Dynamic Parameter  index: 1 AS STRING), Dynamic Parameter  index: 2)) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.99
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pconcat
+    from select count(*) from src where key > concat(?, ?)
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pconcat
+    from select count(*) from src where key > concat(?, ?)
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: src
+                  filterExpr: (key > concat(CAST( '1' AS STRING), '20')) (type: boolean)
+                  Statistics: Num rows: 500 Data size: 43500 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: (key > concat(CAST( '1' AS STRING), '20')) (type: boolean)
+                    Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 166 Data size: 14442 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.99
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pconcat
+    from select count(*) from src where key > concat(?, ?)
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@src
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pconcat
+    from select count(*) from src where key > concat(?, ?)
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@src
+#### A masked pattern was here ####
+474
+PREHOOK: query: create table daysales (customer int) partitioned by (dt string)
+PREHOOK: type: CREATETABLE
+PREHOOK: Output: database:default
+PREHOOK: Output: default@daysales
+POSTHOOK: query: create table daysales (customer int) partitioned by (dt string)
+POSTHOOK: type: CREATETABLE
+POSTHOOK: Output: database:default
+POSTHOOK: Output: default@daysales
+PREHOOK: query: insert into daysales partition(dt='2001-01-01') values(1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@daysales@dt=2001-01-01
+POSTHOOK: query: insert into daysales partition(dt='2001-01-01') values(1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@daysales@dt=2001-01-01
+POSTHOOK: Lineage: daysales PARTITION(dt=2001-01-01).customer SCRIPT []
+PREHOOK: query: insert into daysales partition(dt='2001-01-03') values(1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@daysales@dt=2001-01-03
+POSTHOOK: query: insert into daysales partition(dt='2001-01-03') values(1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@daysales@dt=2001-01-03
+POSTHOOK: Lineage: daysales PARTITION(dt=2001-01-03).customer SCRIPT []
+PREHOOK: query: insert into daysales partition(dt='2001-01-03') values(1)
+PREHOOK: type: QUERY
+PREHOOK: Input: _dummy_database@_dummy_table
+PREHOOK: Output: default@daysales@dt=2001-01-03
+POSTHOOK: query: insert into daysales partition(dt='2001-01-03') values(1)
+POSTHOOK: type: QUERY
+POSTHOOK: Input: _dummy_database@_dummy_table
+POSTHOOK: Output: default@daysales@dt=2001-01-03
+POSTHOOK: Lineage: daysales PARTITION(dt=2001-01-03).customer SCRIPT []
+PREHOOK: query: explain prepare pPart1 from select count(*) from daysales where dt=? and customer=?
+PREHOOK: type: QUERY
+PREHOOK: Input: default@daysales
+PREHOOK: Input: default@daysales@dt=2001-01-01
+PREHOOK: Input: default@daysales@dt=2001-01-03
+#### A masked pattern was here ####
+POSTHOOK: query: explain prepare pPart1 from select count(*) from daysales where dt=? and customer=?
+POSTHOOK: type: QUERY
+POSTHOOK: Input: default@daysales
+POSTHOOK: Input: default@daysales@dt=2001-01-01
+POSTHOOK: Input: default@daysales@dt=2001-01-03
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 depends on stages: Stage-1
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: daysales
+                  filterExpr: ((dt = CAST( Dynamic Parameter  index: 1 AS STRING)) and (customer = UDFToInteger(Dynamic Parameter  index: 2))) (type: boolean)
+                  Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((dt = CAST( Dynamic Parameter  index: 1 AS STRING)) and (customer = UDFToInteger(Dynamic Parameter  index: 2))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pPart1 from select count(*) from daysales where dt=? and customer=?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@daysales
+PREHOOK: Input: default@daysales@dt=2001-01-01
+PREHOOK: Input: default@daysales@dt=2001-01-03
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pPart1 from select count(*) from daysales where dt=? and customer=?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@daysales
+POSTHOOK: Input: default@daysales@dt=2001-01-01
+POSTHOOK: Input: default@daysales@dt=2001-01-03
+#### A masked pattern was here ####
+STAGE DEPENDENCIES:
+  Stage-1 is a root stage
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-1
+    Tez
+#### A masked pattern was here ####
+      Edges:
+        Reducer 2 <- Map 1 (CUSTOM_SIMPLE_EDGE)
+#### A masked pattern was here ####
+      Vertices:
+        Map 1 
+            Map Operator Tree:
+                TableScan
+                  alias: daysales
+                  filterExpr: ((dt = CAST( '2001-01-01' AS STRING)) and (customer = UDFToInteger(1))) (type: boolean)
+                  Statistics: Num rows: 3 Data size: 564 Basic stats: COMPLETE Column stats: COMPLETE
+                  Filter Operator
+                    predicate: ((dt = CAST( '2001-01-01' AS STRING)) and (customer = UDFToInteger(1))) (type: boolean)
+                    Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: COMPLETE
+                    Select Operator
+                      Statistics: Num rows: 1 Data size: 188 Basic stats: COMPLETE Column stats: COMPLETE
+                      Group By Operator
+                        aggregations: count()
+                        minReductionHashAggr: 0.0
+                        mode: hash
+                        outputColumnNames: _col0
+                        Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                        Reduce Output Operator
+                          null sort order: 
+                          sort order: 
+                          Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                          value expressions: _col0 (type: bigint)
+            Execution mode: llap
+            LLAP IO: no inputs
+        Reducer 2 
+            Execution mode: llap
+            Reduce Operator Tree:
+              Group By Operator
+                aggregations: count(VALUE._col0)
+                mode: mergepartial
+                outputColumnNames: _col0
+                Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                File Output Operator
+                  compressed: false
+                  Statistics: Num rows: 1 Data size: 8 Basic stats: COMPLETE Column stats: COMPLETE
+                  table:
+                      input format: org.apache.hadoop.mapred.SequenceFileInputFormat
+                      output format: org.apache.hadoop.hive.ql.io.HiveSequenceFileOutputFormat
+                      serde: org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe
+
+  Stage: Stage-0
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: prepare pPart1 from select count(*) from daysales where dt=? and customer=?
+PREHOOK: type: EXECUTE QUERY
+PREHOOK: Input: default@daysales
+PREHOOK: Input: default@daysales@dt=2001-01-01
+PREHOOK: Input: default@daysales@dt=2001-01-03
+#### A masked pattern was here ####
+POSTHOOK: query: prepare pPart1 from select count(*) from daysales where dt=? and customer=?
+POSTHOOK: type: EXECUTE QUERY
+POSTHOOK: Input: default@daysales
+POSTHOOK: Input: default@daysales@dt=2001-01-01
+POSTHOOK: Input: default@daysales@dt=2001-01-03
+#### A masked pattern was here ####
+1
+PREHOOK: query: prepare pPart1 from select count(*) from daysales where dt=? and customer=?

Review comment:
       Should we show the execute statement instead, or at least both? Otherwise it is difficult to know what is being executed (this can be deferred to a follow-up).
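
       For readers of the archive, a minimal sketch of the statement pair being exercised above, assuming the EXECUTE ... USING syntax added in this patch series; the bound literal '200' is an assumption inferred from the first result page (keys 201..208), not taken from the masked golden output:

           -- Prepare a parameterized statement; each '?' is a dynamic parameter.
           prepare p1 from select * from src where key > ? order by key limit 10;
           -- Bind a value and run the prepared statement.
           -- The literal below is illustrative only; the actual bound values
           -- are hidden behind the masked pattern in the golden output.
           execute p1 using '200';

       Echoing the execute statement in the POSTHOOK block, as suggested, would tie each result page to its bound parameters unambiguously.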




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org


Issue Time Tracking
-------------------

    Worklog Id:     (was: 465876)
    Time Spent: 50m  (was: 40m)

> Support parameterized queries in WHERE/HAVING clause
> ----------------------------------------------------
>
>                 Key: HIVE-23951
>                 URL: https://issues.apache.org/jira/browse/HIVE-23951
>             Project: Hive
>          Issue Type: Sub-task
>          Components: Query Planning
>            Reporter: Vineet Garg
>            Assignee: Vineet Garg
>            Priority: Major
>              Labels: pull-request-available
>          Time Spent: 50m
>  Remaining Estimate: 0h
>




--
This message was sent by Atlassian Jira
(v8.3.4#803005)
