This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git

commit a3ee8d0050a0ac652abd3b46022c28d75cdc3340
Author: Miklos Gergely <mgerg...@hortonworks.com>
AuthorDate: Wed Jun 26 17:34:03 2019 +0200

    HIVE-21914: Move Function and Macro related DDL operations into the DDL framework (Miklos Gergely via Zoltan Haindrich)
    
    Signed-off-by: Zoltan Haindrich <k...@rxd.hu>
---
 .../hive/llap/daemon/impl/FunctionLocalizer.java   |   4 +-
 .../{plan => ddl/function}/CreateFunctionDesc.java |  66 ++--
 .../ql/ddl/function/CreateFunctionOperation.java   | 217 +++++++++++
 .../hive/ql/ddl/function/CreateMacroDesc.java      |  81 +++++
 .../function/CreateMacroOperation.java}            |  38 +-
 .../{plan => ddl/function}/DropFunctionDesc.java   |  51 +--
 .../ql/ddl/function/DropFunctionOperation.java     | 107 ++++++
 .../ql/{plan => ddl/function}/DropMacroDesc.java   |  28 +-
 .../function/DropMacroOperation.java}              |  42 +--
 .../function/ReloadFunctionsDesc.java}             |  13 +-
 .../function/ReloadFunctionsOperation.java}        |  43 +--
 .../apache/hadoop/hive/ql/exec/FunctionTask.java   | 399 ---------------------
 .../apache/hadoop/hive/ql/exec/FunctionUtils.java  |  55 ++-
 .../org/apache/hadoop/hive/ql/exec/Registry.java   |   4 +-
 .../apache/hadoop/hive/ql/exec/TaskFactory.java    |   3 -
 .../org/apache/hadoop/hive/ql/metadata/Hive.java   |   3 +-
 .../hive/ql/optimizer/QueryPlanPostProcessor.java  |   2 -
 .../hive/ql/parse/FunctionSemanticAnalyzer.java    |  18 +-
 .../org/apache/hadoop/hive/ql/parse/HiveParser.g   |  11 +-
 .../hive/ql/parse/MacroSemanticAnalyzer.java       |  12 +-
 .../hive/ql/parse/SemanticAnalyzerFactory.java     |   4 +-
 .../repl/load/message/CreateFunctionHandler.java   |  12 +-
 .../repl/load/message/DropFunctionHandler.java     |   9 +-
 .../hadoop/hive/ql/plan/CreateMacroDesc.java       |  73 ----
 .../apache/hadoop/hive/ql/plan/FunctionWork.java   |  93 -----
 .../hive/ql/parse/TestMacroSemanticAnalyzer.java   |   3 +
 .../hadoop/hive/ql/plan/TestCreateMacroDesc.java   |   8 +-
 .../hadoop/hive/ql/plan/TestDropMacroDesc.java     |   3 +-
 ql/src/test/queries/clientpositive/create_func1.q  |  15 +
 .../create_function_nonexistent_class.q.out        |   2 +-
 .../create_function_nonudf_class.q.out             |   2 +-
 .../clientnegative/create_unknown_genericudf.q.out |   2 +-
 .../clientnegative/create_unknown_udf_udaf.q.out   |   2 +-
 .../test/results/clientnegative/ivyDownload.q.out  |   2 +-
 .../udf_function_does_not_implement_udf.q.out      |   2 +-
 .../clientnegative/udf_local_resource.q.out        |   2 +-
 .../clientnegative/udf_nonexistent_resource.q.out  |   2 +-
 .../test/results/clientpositive/create_func1.q.out | 102 ++++++
 .../clientpositive/create_genericudaf.q.out        |   4 +
 .../results/clientpositive/create_genericudf.q.out |   4 +
 .../test/results/clientpositive/create_udaf.q.out  |   4 +
 ql/src/test/results/clientpositive/drop_udf.q.out  |   3 +
 .../clientpositive/tez/explainanalyze_3.q.out      |   2 +
 .../results/clientpositive/tez/explainuser_3.q.out |   2 +
 .../clientpositive/udf_compare_java_string.q.out   |   4 +
 .../clientpositive/udf_logic_java_boolean.q.out    |   4 +
 .../results/clientpositive/udf_testlength.q.out    |   4 +
 .../results/clientpositive/udf_testlength2.q.out   |   4 +
 48 files changed, 778 insertions(+), 792 deletions(-)
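
The refactoring follows the DDL framework convention: each command is described by an immutable *Desc (a DDLDesc holding only the parsed parameters, with @Explain getters feeding EXPLAIN output) and executed by a matching *Operation (a DDLOperation<T> whose execute() returns 0 on success and non-zero on failure). Below is a minimal sketch of that contract, assuming only the DDLDesc/DDLOperation/DDLOperationContext API visible in the hunks that follow; the EchoDesc/EchoOperation pair is hypothetical.

    package org.apache.hadoop.hive.ql.ddl.function;

    import java.io.Serializable;

    import org.apache.hadoop.hive.ql.ddl.DDLDesc;
    import org.apache.hadoop.hive.ql.ddl.DDLOperation;
    import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
    import org.apache.hadoop.hive.ql.metadata.HiveException;
    import org.apache.hadoop.hive.ql.plan.Explain;
    import org.apache.hadoop.hive.ql.plan.Explain.Level;

    /** Hypothetical desc: immutable command parameters, rendered by EXPLAIN via @Explain. */
    @Explain(displayName = "Echo", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
    class EchoDesc implements DDLDesc, Serializable {
      private static final long serialVersionUID = 1L;

      private final String message;

      EchoDesc(String message) {
        this.message = message;
      }

      @Explain(displayName = "message", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
      public String getMessage() {
        return message;
      }
    }

    /** Hypothetical operation: holds all execution logic for its desc type. */
    class EchoOperation extends DDLOperation<EchoDesc> {
      EchoOperation(DDLOperationContext context, EchoDesc desc) {
        super(context, desc);
      }

      @Override
      public int execute() throws HiveException {
        // Same convention as the operations in this patch: 0 on success, 1 on failure.
        context.getConsole().printInfo(desc.getMessage());
        return 0;
      }
    }

With the monolithic FunctionTask gone, dispatch happens per desc type rather than through a FunctionWork bundle, which is why TaskFactory below drops its FunctionWork registration and FunctionWork.java is deleted.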

diff --git a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/FunctionLocalizer.java b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/FunctionLocalizer.java
index 136fe2a..315c5be 100644
--- a/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/FunctionLocalizer.java
+++ b/llap-server/src/java/org/apache/hadoop/hive/llap/daemon/impl/FunctionLocalizer.java
@@ -37,7 +37,7 @@ import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.ResourceUri;
 import org.apache.hadoop.hive.ql.exec.AddToClassPathAction;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hadoop.hive.ql.exec.FunctionTask;
+import org.apache.hadoop.hive.ql.exec.FunctionUtils;
 import org.apache.hadoop.hive.ql.exec.UDFClassLoader;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -269,7 +269,7 @@ public class FunctionLocalizer implements GenericUDFBridge.UdfWhitelistChecker {
     }
     for (ResourceUri resource : resources) {
       URI srcUri = ResourceDownloader.createURI(resource.getUri());
-      ResourceType rt = FunctionTask.getResourceType(resource.getResourceType());
+      ResourceType rt = FunctionUtils.getResourceType(resource.getResourceType());
       localizeOneResource(fqfn, srcUri, rt, result);
     }
     recentlyLocalizedClasses.add(className);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateFunctionDesc.java
similarity index 58%
rename from ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateFunctionDesc.java
index 92c00ca..9e21aeb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateFunctionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateFunctionDesc.java
@@ -16,86 +16,64 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
+package org.apache.hadoop.hive.ql.ddl.function;
 
 import java.io.Serializable;
 import java.util.List;
 
 import org.apache.hadoop.hive.metastore.api.ResourceUri;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
- * CreateFunctionDesc.
- *
+ * DDL task description for CREATE [TEMPORARY] FUNCTION commands.
  */
 @Explain(displayName = "Create Function", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
-public class CreateFunctionDesc implements Serializable {
+public class CreateFunctionDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  private String functionName;
-  private String className;
-  private boolean isTemp;
-  private List<ResourceUri> resources;
-  private ReplicationSpec replicationSpec;
+  private final String name;
+  private final String className;
+  private final boolean isTemporary;
+  private final List<ResourceUri> resources;
+  private final ReplicationSpec replicationSpec;
 
-  /**
-   * For serialization only.
-   */
-  public CreateFunctionDesc() {
-  }
-  
-  public CreateFunctionDesc(String functionName, boolean isTemp, String className,
-      List<ResourceUri> resources, ReplicationSpec replicationSpec) {
-    this.functionName = functionName;
-    this.isTemp = isTemp;
+  public CreateFunctionDesc(String name, String className, boolean isTemporary, List<ResourceUri> resources,
+      ReplicationSpec replicationSpec) {
+    this.name = name;
     this.className = className;
+    this.isTemporary = isTemporary;
     this.resources = resources;
-    this.replicationSpec = replicationSpec;
+    this.replicationSpec = replicationSpec == null ? new ReplicationSpec() : replicationSpec;
   }
 
   @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, 
Level.EXTENDED })
-  public String getFunctionName() {
-    return functionName;
+  public String getName() {
+    return name;
   }
 
-  public void setFunctionName(String functionName) {
-    this.functionName = functionName;
-  }
-
-  @Explain(displayName = "class")
+  @Explain(displayName = "class", explainLevels = { Level.USER, Level.DEFAULT, 
Level.EXTENDED })
   public String getClassName() {
     return className;
   }
 
-  public void setClassName(String className) {
-    this.className = className;
-  }
-
+  @Explain(displayName = "temporary", displayOnlyOnTrue = true,
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public boolean isTemp() {
-    return isTemp;
-  }
-
-  public void setTemp(boolean isTemp) {
-    this.isTemp = isTemp;
+    return isTemporary;
   }
 
   public List<ResourceUri> getResources() {
     return resources;
   }
 
-  public void setResources(List<ResourceUri> resources) {
-    this.resources = resources;
-  }
-
   /**
    * @return what kind of replication scope this create is running under.
    * This can result in a "CREATE IF NEWER THAN" kind of semantic
    */
   public ReplicationSpec getReplicationSpec() {
-    if (replicationSpec == null) {
-      this.replicationSpec = new ReplicationSpec();
-    }
-    return this.replicationSpec;
+    return replicationSpec;
   }
 }
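
As a usage illustration, a semantic analyzer would now build the desc above roughly as follows; the function name, class, and jar URI are invented, and passing null for the replication spec falls back to a fresh ReplicationSpec, as the constructor shows.

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.hive.metastore.api.ResourceType;
    import org.apache.hadoop.hive.metastore.api.ResourceUri;
    import org.apache.hadoop.hive.ql.ddl.function.CreateFunctionDesc;

    class CreateFunctionDescExample {
      static CreateFunctionDesc example() {
        // A permanent function backed by a jar on HDFS (all values are made up).
        List<ResourceUri> resources =
            Arrays.asList(new ResourceUri(ResourceType.JAR, "hdfs:///udfs/my-udf.jar"));
        return new CreateFunctionDesc(
            "mydb.my_upper",           // function name, optionally db-qualified
            "com.example.udf.MyUpper", // implementing UDF class (assumed)
            false,                     // not temporary: persisted to the metastore
            resources,
            null);                     // null defaults to a fresh ReplicationSpec
      }
    }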
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateFunctionOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateFunctionOperation.java
new file mode 100644
index 0000000..995beed
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateFunctionOperation.java
@@ -0,0 +1,217 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.function;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.FunctionUtils;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.commons.collections.CollectionUtils;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
+import org.apache.hadoop.hive.metastore.api.Function;
+import org.apache.hadoop.hive.metastore.api.FunctionType;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.util.ResourceDownloader;
+
+/**
+ * Operation process of creating a function.
+ */
+public class CreateFunctionOperation extends DDLOperation<CreateFunctionDesc> {
+  public CreateFunctionOperation(DDLOperationContext context, CreateFunctionDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    if (desc.isTemp()) {
+      return createTemporaryFunction();
+    } else {
+      try {
+        return createPermanentFunction();
+      } catch (Exception e) {
+        return handlePermanentFunctionCreationException(e);
+      }
+    }
+  }
+
+  private int createTemporaryFunction() {
+    try {
+      // Add any required resources
+      FunctionResource[] resources = FunctionUtils.toFunctionResource(desc.getResources());
+      FunctionUtils.addFunctionResources(resources);
+
+      Class<?> udfClass = getUdfClass();
+      FunctionInfo registered = FunctionRegistry.registerTemporaryUDF(desc.getName(), udfClass, resources);
+      if (registered != null) {
+        return 0;
+      } else {
+        context.getConsole().printError(
+            "FAILED: Class " + desc.getClassName() + " does not implement UDF, 
GenericUDF, or UDAF");
+        return 1;
+      }
+    } catch (HiveException e) {
+      context.getConsole().printError("FAILED: " + e.toString());
+      LOG.info("create function: ", e);
+      return 1;
+    } catch (ClassNotFoundException e) {
+      context.getConsole().printError("FAILED: Class " + desc.getClassName() + 
" not found");
+      LOG.info("create function: ", e);
+      return 1;
+    }
+  }
+
+  private Class<?> getUdfClass() throws ClassNotFoundException {
+    // get the session specified class loader from SessionState
+    ClassLoader classLoader = Utilities.getSessionSpecifiedClassLoader();
+    return Class.forName(desc.getClassName(), true, classLoader);
+  }
+
+  // todo authorization
+  private int createPermanentFunction() throws HiveException, IOException {
+    String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(desc.getName());
+    String dbName = qualifiedNameParts[0];
+    String functionName = qualifiedNameParts[1];
+
+    if (skipIfNewerThenUpdate(dbName, functionName)) {
+      return 0;
+    }
+
+    // For permanent functions, check for any resources from local filesystem.
+    checkLocalFunctionResources();
+
+    String registeredName = FunctionUtils.qualifyFunctionName(functionName, dbName);
+    boolean registrationSuccess = registerFunction(registeredName);
+    if (!registrationSuccess) {
+      context.getConsole().printError("Failed to register " + registeredName + 
" using class " + desc.getClassName());
+      return 1;
+    }
+
+    boolean addToMetastoreSuccess = addToMetastore(dbName, functionName, registeredName);
+    if (!addToMetastoreSuccess) {
+      return 1;
+    }
+
+    return 0;
+  }
+
+  private boolean skipIfNewerThenUpdate(String dbName, String functionName) throws HiveException {
+    if (desc.getReplicationSpec().isInReplicationScope()) {
+      Map<String, String> dbProps = Hive.get().getDatabase(dbName).getParameters();
+      if (!desc.getReplicationSpec().allowEventReplacementInto(dbProps)) {
+        // If the database is newer than the create event, then noop it.
+        LOG.debug("FunctionTask: Create Function {} is skipped as database {} 
is newer than update",
+            functionName, dbName);
+        return true;
+      }
+    }
+
+    return false;
+  }
+
+  private void checkLocalFunctionResources() throws HiveException {
+    // If this is a non-local warehouse, then adding resources from the local filesystem
+    // may mean that other clients will not be able to access the resources.
+    // So disallow resources from local filesystem in this case.
+    if (CollectionUtils.isNotEmpty(desc.getResources())) {
+      try {
+        String localFsScheme = FileSystem.getLocal(context.getDb().getConf()).getUri().getScheme();
+        String configuredFsScheme = FileSystem.get(context.getDb().getConf()).getUri().getScheme();
+        if (configuredFsScheme.equals(localFsScheme)) {
+          // Configured warehouse FS is local, don't need to bother checking.
+          return;
+        }
+
+        for (ResourceUri res : desc.getResources()) {
+          String resUri = res.getUri();
+          if (ResourceDownloader.isFileUri(resUri)) {
+            throw new HiveException("Hive warehouse is non-local, but " + 
res.getUri() + " specifies file on local "
+                + "filesystem. Resources on non-local warehouse should specify 
a non-local scheme/path");
+          }
+        }
+      } catch (HiveException e) {
+        throw e;
+      } catch (Exception e) {
+        LOG.error("Exception caught in checkLocalFunctionResources", e);
+        throw new HiveException(e);
+      }
+    }
+  }
+
+  private boolean registerFunction(String registeredName) throws SemanticException, HiveException {
+    FunctionInfo registered = null;
+    HiveConf oldConf = SessionState.get().getConf();
+    try {
+      SessionState.get().setConf(context.getConf());
+      registered = FunctionRegistry.registerPermanentFunction(registeredName, desc.getClassName(), true,
+          FunctionUtils.toFunctionResource(desc.getResources()));
+    } catch (RuntimeException ex) {
+      Throwable t = ex;
+      while (t.getCause() != null) {
+        t = t.getCause();
+      }
+      context.getTask().setException(t);
+    } finally {
+      SessionState.get().setConf(oldConf);
+    }
+    return registered != null;
+  }
+
+  private boolean addToMetastore(String dbName, String functionName, String registeredName) throws HiveException {
+    try {
+      // TODO: should this use getUserFromAuthenticator instead of SessionState.get().getUserName()?
+      Function function = new Function(functionName, dbName, desc.getClassName(), SessionState.get().getUserName(),
+          PrincipalType.USER, (int) (System.currentTimeMillis() / 1000), FunctionType.JAVA, desc.getResources());
+      context.getDb().createFunction(function);
+      return true;
+    } catch (Exception e) {
+      // Addition to metastore failed, remove the function from the registry except if already exists.
+      if (!(e.getCause() instanceof AlreadyExistsException)) {
+        FunctionRegistry.unregisterPermanentFunction(registeredName);
+      }
+      context.getTask().setException(e);
+      LOG.error("Failed to add function " + desc.getName() + " to the 
metastore.", e);
+      return false;
+    }
+  }
+
+  private int handlePermanentFunctionCreationException(Exception e) {
+    // For repl load flow, function may exist for first incremental phase. So, just return success.
+    if (desc.getReplicationSpec().isInReplicationScope() && (e.getCause() instanceof AlreadyExistsException)) {
+      LOG.info("Create function is idempotent as function: " + desc.getName() + " already exists.");
+      return 0;
+    }
+    context.getTask().setException(e);
+    LOG.error("Failed to create function", e);
+    return 1;
+  }
+}
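
Both this operation and DropFunctionOperation below begin by resolving the possibly db-qualified name through FunctionUtils. A hedged sketch of that step (the name is illustrative; an unqualified name would be resolved against the session's current database):

    import org.apache.hadoop.hive.ql.exec.FunctionUtils;
    import org.apache.hadoop.hive.ql.metadata.HiveException;

    class FunctionNameExample {
      static void resolve() throws HiveException {
        // Splits "mydb.my_upper" into { "mydb", "my_upper" }.
        String[] parts = FunctionUtils.getQualifiedFunctionNameParts("mydb.my_upper");
        String dbName = parts[0];
        String functionName = parts[1];

        // Re-qualifies the pair for the registry, yielding "mydb.my_upper" again.
        String registeredName = FunctionUtils.qualifyFunctionName(functionName, dbName);
        System.out.println(registeredName);
      }
    }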
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateMacroDesc.java
new file mode 100644
index 0000000..ba1b82e
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateMacroDesc.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.function;
+
+import java.io.Serializable;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
+import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
+
+/**
+ * DDL task description for CREATE TEMPORARY MACRO commands.
+ */
+@Explain(displayName = "Create Macro", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
+public class CreateMacroDesc implements DDLDesc, Serializable {
+  private static final long serialVersionUID = 1L;
+
+  private final String macroName;
+  private final List<String> columnNames;
+  private final List<TypeInfo> columnTypes;
+  private final ExprNodeDesc body;
+
+  public CreateMacroDesc(String macroName, List<String> columnNames, List<TypeInfo> columnTypes, ExprNodeDesc body) {
+    this.macroName = macroName;
+    this.columnNames = columnNames;
+    this.columnTypes = columnTypes;
+    this.body = body;
+  }
+
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, 
Level.EXTENDED })
+  public String getName() {
+    return macroName;
+  }
+
+  @Explain(displayName = "column names", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
+  public List<String> getColumnNames() {
+    return columnNames;
+  }
+
+  public List<TypeInfo> getColumnTypes() {
+    return columnTypes;
+  }
+
+  /** For explaining only. */
+  @Explain(displayName = "column types", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
+  public List<String> getColumnTypeStrings() {
+    return columnTypes.stream()
+        .map(typeInfo -> typeInfo.getTypeName())
+        .collect(Collectors.toList());
+  }
+
+  public ExprNodeDesc getBody() {
+    return body;
+  }
+
+  /** For explaining only. */
+  @Explain(displayName = "body", explainLevels = { Level.USER, Level.DEFAULT, 
Level.EXTENDED })
+  public String getBodyString() {
+    return body.toString();
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateMacroOperation.java
similarity index 54%
copy from ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateMacroOperation.java
index 2b0c683..c7787db 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/CreateMacroOperation.java
@@ -16,35 +16,25 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
-
-import java.io.Serializable;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
+package org.apache.hadoop.hive.ql.ddl.function;
 
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 /**
- * DropMacroDesc.
- *
+ * Operation process of creating a macro.
  */
-@Explain(displayName = "Drop Macro", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
-public class DropMacroDesc implements Serializable {
-  private static final long serialVersionUID = 1L;
-
-  private String macroName;
-
-  /**
-   * For serialization only.
-   */
-  public DropMacroDesc() {
+public class CreateMacroOperation extends DDLOperation<CreateMacroDesc> {
+  public CreateMacroOperation(DDLOperationContext context, CreateMacroDesc desc) {
+    super(context, desc);
   }
 
-  public DropMacroDesc(String macroName) {
-    this.macroName = macroName;
+  @Override
+  public int execute() throws HiveException {
+    FunctionRegistry.registerTemporaryMacro(desc.getName(), desc.getBody(), desc.getColumnNames(),
+        desc.getColumnTypes());
+    return 0;
   }
-
-  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, 
Level.EXTENDED })
-  public String getMacroName() {
-    return macroName;
-  }
-
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropFunctionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropFunctionDesc.java
similarity index 62%
rename from ql/src/java/org/apache/hadoop/hive/ql/plan/DropFunctionDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropFunctionDesc.java
index d3415a5..bc952e7 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropFunctionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropFunctionDesc.java
@@ -16,53 +16,41 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
+package org.apache.hadoop.hive.ql.ddl.function;
 
 import java.io.Serializable;
 
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
 import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
+import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-
 /**
- * DropFunctionDesc.
- *
+ * DDL task description for DROP [TEMPORARY] FUNCTION commands.
  */
 @Explain(displayName = "Drop Function", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
-public class DropFunctionDesc implements Serializable {
+public class DropFunctionDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  private String functionName;
-  private boolean isTemp;
-  private ReplicationSpec replicationSpec;
+  private final String name;
+  private final boolean isTemporary;
+  private final ReplicationSpec replicationSpec;
 
-  /**
-   * For serialization only.
-   */
-  public DropFunctionDesc() {
-  }
-  
-  public DropFunctionDesc(String functionName, boolean isTemp, ReplicationSpec replicationSpec) {
-    this.functionName = functionName;
-    this.isTemp = isTemp;
-    this.replicationSpec = replicationSpec;
+  public DropFunctionDesc(String name, boolean isTemporary, ReplicationSpec replicationSpec) {
+    this.name = name;
+    this.isTemporary = isTemporary;
+    this.replicationSpec = replicationSpec == null ? new ReplicationSpec() : replicationSpec;
   }
 
   @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, 
Level.EXTENDED })
-  public String getFunctionName() {
-    return functionName;
-  }
-
-  public void setFunctionName(String functionName) {
-    this.functionName = functionName;
-  }
-
-  public boolean isTemp() {
-    return isTemp;
+  public String getName() {
+    return name;
   }
 
-  public void setTemp(boolean isTemp) {
-    this.isTemp = isTemp;
+  @Explain(displayName = "temporary", displayOnlyOnTrue = true,
+      explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
+  public boolean isTemporary() {
+    return isTemporary;
   }
 
   /**
@@ -70,9 +58,6 @@ public class DropFunctionDesc implements Serializable {
    * This can result in a "DROP IF NEWER THAN" kind of semantic
    */
   public ReplicationSpec getReplicationSpec() {
-    if (replicationSpec == null) {
-      this.replicationSpec = new ReplicationSpec();
-    }
     return this.replicationSpec;
   }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropFunctionOperation.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropFunctionOperation.java
new file mode 100644
index 0000000..fae8583
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropFunctionOperation.java
@@ -0,0 +1,107 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.function;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.FunctionUtils;
+
+import java.util.Map;
+
+import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.util.StringUtils;
+
+/**
+ * Operation process of dropping a function.
+ */
+public class DropFunctionOperation extends DDLOperation<DropFunctionDesc> {
+  public DropFunctionOperation(DDLOperationContext context, DropFunctionDesc desc) {
+    super(context, desc);
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    if (desc.isTemporary()) {
+      return dropTemporaryFunction();
+    } else {
+      try {
+        return dropPermanentFunction();
+      } catch (Exception e) {
+        context.getTask().setException(e);
+        LOG.error("Failed to drop function", e);
+        return 1;
+      }
+    }
+  }
+
+  private int dropTemporaryFunction() {
+    try {
+      FunctionRegistry.unregisterTemporaryUDF(desc.getName());
+      return 0;
+    } catch (HiveException e) {
+      LOG.info("drop function: ", e);
+      return 1;
+    }
+  }
+
+  // todo authorization
+  private int dropPermanentFunction() throws HiveException {
+    String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(desc.getName());
+    String dbName = qualifiedNameParts[0];
+    String functionName = qualifiedNameParts[1];
+
+    if (skipIfNewerThenUpdate(dbName, functionName)) {
+      return 0;
+    }
+
+    try {
+      String registeredName = FunctionUtils.qualifyFunctionName(functionName, dbName);
+      FunctionRegistry.unregisterPermanentFunction(registeredName);
+      context.getDb().dropFunction(dbName, functionName);
+
+      return 0;
+    } catch (Exception e) {
+      // For repl load flow, function may not exist for first incremental phase. So, just return success.
+      if (desc.getReplicationSpec().isInReplicationScope() && (e.getCause() instanceof NoSuchObjectException)) {
+        LOG.info("Drop function is idempotent as function: " + desc.getName() + " doesn't exist.");
+        return 0;
+      }
+      LOG.info("drop function: ", e);
+      context.getConsole().printError("FAILED: error during drop function: " + 
StringUtils.stringifyException(e));
+      return 1;
+    }
+  }
+
+  private boolean skipIfNewerThenUpdate(String dbName, String functionName) throws HiveException {
+    if (desc.getReplicationSpec().isInReplicationScope()) {
+      Map<String, String> dbProps = Hive.get().getDatabase(dbName).getParameters();
+      if (!desc.getReplicationSpec().allowEventReplacementInto(dbProps)) {
+        // If the database is newer than the drop event, then noop it.
+        LOG.debug("FunctionTask: Drop Function {} is skipped as database {} is newer than update", functionName,
+            dbName);
+        return true;
+      }
+    }
+
+    return false;
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropMacroDesc.java
similarity index 74%
copy from ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropMacroDesc.java
index 2b0c683..a09bfb4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropMacroDesc.java
@@ -16,35 +16,29 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
+package org.apache.hadoop.hive.ql.ddl.function;
 
 import java.io.Serializable;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
- * DropMacroDesc.
- *
+ * DDL task description for DROP TEMPORARY MACRO commands.
  */
 @Explain(displayName = "Drop Macro", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
-public class DropMacroDesc implements Serializable {
+public class DropMacroDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 
-  private String macroName;
+  private final String name;
 
-  /**
-   * For serialization only.
-   */
-  public DropMacroDesc() {
-  }
-
-  public DropMacroDesc(String macroName) {
-    this.macroName = macroName;
+  public DropMacroDesc(String name) {
+    this.name = name;
   }
 
   @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, 
Level.EXTENDED })
-  public String getMacroName() {
-    return macroName;
+  public String getName() {
+    return name;
   }
-
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropMacroOperation.java
similarity index 53%
copy from ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java
copy to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropMacroOperation.java
index 2b0c683..f7e5acb 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DropMacroOperation.java
@@ -16,35 +16,29 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
-
-import java.io.Serializable;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
+package org.apache.hadoop.hive.ql.ddl.function;
 
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 /**
- * DropMacroDesc.
- *
+ * Operation process of dropping a macro.
  */
-@Explain(displayName = "Drop Macro", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
-public class DropMacroDesc implements Serializable {
-  private static final long serialVersionUID = 1L;
-
-  private String macroName;
-
-  /**
-   * For serialization only.
-   */
-  public DropMacroDesc() {
+public class DropMacroOperation extends DDLOperation<DropMacroDesc> {
+  public DropMacroOperation(DDLOperationContext context, DropMacroDesc desc) {
+    super(context, desc);
   }
 
-  public DropMacroDesc(String macroName) {
-    this.macroName = macroName;
+  @Override
+  public int execute() throws HiveException {
+    try {
+      FunctionRegistry.unregisterTemporaryUDF(desc.getName());
+      return 0;
+    } catch (HiveException e) {
+      LOG.info("drop macro: ", e);
+      return 1;
+    }
   }
-
-  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, 
Level.EXTENDED })
-  public String getMacroName() {
-    return macroName;
-  }
-
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReloadFunctionDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ReloadFunctionsDesc.java
similarity index 68%
rename from ql/src/java/org/apache/hadoop/hive/ql/plan/ReloadFunctionDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ReloadFunctionsDesc.java
index ac1fba2..f87ee55 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ReloadFunctionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ReloadFunctionsDesc.java
@@ -16,11 +16,18 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
+package org.apache.hadoop.hive.ql.ddl.function;
 
 import java.io.Serializable;
 
-@Explain(displayName = "Reload Function")
-public class ReloadFunctionDesc implements Serializable {
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.plan.Explain;
+
+/**
+ * DDL task description for RELOAD FUNCTIONS commands.
+ * Due to backward compatibility reasons we also accept the RELOAD FUNCTION command.
+ */
+@Explain(displayName = "Reload Functions")
+public class ReloadFunctionsDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ReloadFunctionsOperation.java
similarity index 51%
rename from ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java
rename to ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ReloadFunctionsOperation.java
index 2b0c683..2ab119a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropMacroDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ReloadFunctionsOperation.java
@@ -16,35 +16,30 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
-
-import java.io.Serializable;
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
+package org.apache.hadoop.hive.ql.ddl.function;
 
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.Hive;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
 
 /**
- * DropMacroDesc.
- *
+ * Operation process of reloading the functions.
  */
-@Explain(displayName = "Drop Macro", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
-public class DropMacroDesc implements Serializable {
-  private static final long serialVersionUID = 1L;
-
-  private String macroName;
-
-  /**
-   * For serialization only.
-   */
-  public DropMacroDesc() {
+public class ReloadFunctionsOperation extends DDLOperation<ReloadFunctionsDesc> {
+  public ReloadFunctionsOperation(DDLOperationContext context, ReloadFunctionsDesc desc) {
+    super(context, desc);
   }
 
-  public DropMacroDesc(String macroName) {
-    this.macroName = macroName;
+  @Override
+  public int execute() throws HiveException {
+    try {
+      Hive.get().reloadFunctions();
+      return 0;
+    } catch (Exception e) {
+      context.getTask().setException(e);
+      LOG.error("Failed to reload functions", e);
+      return 1;
+    }
   }
-
-  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, 
Level.EXTENDED })
-  public String getMacroName() {
-    return macroName;
-  }
-
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
deleted file mode 100644
index 2061cf4..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionTask.java
+++ /dev/null
@@ -1,399 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.exec;
-
-import java.io.IOException;
-import java.util.List;
-import java.util.Map;
-
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.Multimap;
-
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.metastore.api.AlreadyExistsException;
-import org.apache.hadoop.hive.metastore.api.Function;
-import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
-import org.apache.hadoop.hive.metastore.api.PrincipalType;
-import org.apache.hadoop.hive.metastore.api.ResourceType;
-import org.apache.hadoop.hive.metastore.api.ResourceUri;
-import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
-import org.apache.hadoop.hive.ql.CompilationOpContext;
-import org.apache.hadoop.hive.ql.DriverContext;
-import org.apache.hadoop.hive.ql.QueryPlan;
-import org.apache.hadoop.hive.ql.QueryState;
-import org.apache.hadoop.hive.ql.metadata.Hive;
-import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
-import org.apache.hadoop.hive.ql.plan.DropFunctionDesc;
-import org.apache.hadoop.hive.ql.plan.CreateMacroDesc;
-import org.apache.hadoop.hive.ql.plan.DropMacroDesc;
-import org.apache.hadoop.hive.ql.plan.FunctionWork;
-import org.apache.hadoop.hive.ql.plan.api.StageType;
-import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.ql.util.ResourceDownloader;
-import org.apache.hadoop.util.StringUtils;
-
-/**
- * FunctionTask.
- *
- */
-public class FunctionTask extends Task<FunctionWork> {
-  private static final long serialVersionUID = 1L;
-  private static transient final Logger LOG = LoggerFactory.getLogger(FunctionTask.class);
-
-  public FunctionTask() {
-    super();
-  }
-
-  @Override
-  public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext ctx,
-      CompilationOpContext opContext) {
-    super.initialize(queryState, queryPlan, ctx, opContext);
-  }
-
-  @Override
-  public int execute(DriverContext driverContext) {
-    CreateFunctionDesc createFunctionDesc = work.getCreateFunctionDesc();
-    if (createFunctionDesc != null) {
-      if (createFunctionDesc.isTemp()) {
-        return createTemporaryFunction(createFunctionDesc);
-      } else {
-        try {
-          if (createFunctionDesc.getReplicationSpec().isInReplicationScope()) {
-            String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(
-                    createFunctionDesc.getFunctionName());
-            String dbName = qualifiedNameParts[0];
-            String funcName = qualifiedNameParts[1];
-            Map<String, String> dbProps = 
Hive.get().getDatabase(dbName).getParameters();
-            if 
(!createFunctionDesc.getReplicationSpec().allowEventReplacementInto(dbProps)) {
-              // If the database is newer than the create event, then noop it.
-              LOG.debug("FunctionTask: Create Function {} is skipped as 
database {} " +
-                        "is newer than update", funcName, dbName);
-              return 0;
-            }
-          }
-          return createPermanentFunction(Hive.get(conf), createFunctionDesc);
-        } catch (Exception e) {
-          // For repl load flow, function may exist for first incremental phase. So, just return success.
-          if (createFunctionDesc.getReplicationSpec().isInReplicationScope()
-                  && (e.getCause() instanceof AlreadyExistsException)) {
-            LOG.info("Create function is idempotent as function: "
-                    + createFunctionDesc.getFunctionName() + " already exists.");
-            return 0;
-          }
-          setException(e);
-          LOG.error("Failed to create function", e);
-          return 1;
-        }
-      }
-    }
-
-    DropFunctionDesc dropFunctionDesc = work.getDropFunctionDesc();
-    if (dropFunctionDesc != null) {
-      if (dropFunctionDesc.isTemp()) {
-        return dropTemporaryFunction(dropFunctionDesc);
-      } else {
-        try {
-          if (dropFunctionDesc.getReplicationSpec().isInReplicationScope()) {
-            String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(
-                    dropFunctionDesc.getFunctionName());
-            String dbName = qualifiedNameParts[0];
-            String funcName = qualifiedNameParts[1];
-            Map<String, String> dbProps = 
Hive.get().getDatabase(dbName).getParameters();
-            if 
(!dropFunctionDesc.getReplicationSpec().allowEventReplacementInto(dbProps)) {
-              // If the database is newer than the drop event, then noop it.
-              LOG.debug("FunctionTask: Drop Function {} is skipped as database 
{} " +
-                        "is newer than update", funcName, dbName);
-              return 0;
-            }
-          }
-          return dropPermanentFunction(Hive.get(conf), dropFunctionDesc);
-        } catch (Exception e) {
-          setException(e);
-          LOG.error("Failed to drop function", e);
-          return 1;
-        }
-      }
-    }
-
-    if (work.getReloadFunctionDesc() != null) {
-      try {
-        Hive.get().reloadFunctions();
-      } catch (Exception e) {
-        setException(e);
-        LOG.error("Failed to reload functions", e);
-        return 1;
-      }
-    }
-
-    CreateMacroDesc createMacroDesc = work.getCreateMacroDesc();
-    if (createMacroDesc != null) {
-      return createMacro(createMacroDesc);
-    }
-
-    DropMacroDesc dropMacroDesc = work.getDropMacroDesc();
-    if (dropMacroDesc != null) {
-      return dropMacro(dropMacroDesc);
-    }
-    return 0;
-  }
-
-  // todo authorization
-  private int createPermanentFunction(Hive db, CreateFunctionDesc createFunctionDesc)
-      throws HiveException, IOException {
-    String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(
-        createFunctionDesc.getFunctionName());
-    String dbName = qualifiedNameParts[0];
-    String funcName = qualifiedNameParts[1];
-    String registeredName = FunctionUtils.qualifyFunctionName(funcName, dbName);
-    String className = createFunctionDesc.getClassName();
-
-    List<ResourceUri> resources = createFunctionDesc.getResources();
-
-    // For permanent functions, check for any resources from local filesystem.
-    checkLocalFunctionResources(db, createFunctionDesc.getResources());
-
-    FunctionInfo registered = null;
-    HiveConf oldConf = SessionState.get().getConf();
-    try {
-      SessionState.get().setConf(conf);
-      registered = FunctionRegistry.registerPermanentFunction(
-        registeredName, className, true, toFunctionResource(resources));
-    } catch (RuntimeException ex) {
-      Throwable t = ex;
-      while (t.getCause() != null) {
-        t = t.getCause();
-      }
-    } finally {
-      SessionState.get().setConf(oldConf);
-    }
-
-    if (registered == null) {
-      console.printError("Failed to register " + registeredName
-          + " using class " + createFunctionDesc.getClassName());
-      return 1;
-    }
-
-    // Add to metastore
-    Function func = new Function(
-        funcName,
-        dbName,
-        className,
-        SessionState.get().getUserName(), // TODO: should this use getUserFromAuthenticator?
-        PrincipalType.USER,
-        (int) (System.currentTimeMillis() / 1000),
-        org.apache.hadoop.hive.metastore.api.FunctionType.JAVA,
-        resources
-    );
-    try {
-      db.createFunction(func);
-    } catch (Exception e) {
-      // Addition to metastore failed, remove the function from the registry except if already exists.
-      if(!(e.getCause() instanceof AlreadyExistsException)) {
-        FunctionRegistry.unregisterPermanentFunction(registeredName);
-      }
-      setException(e);
-      LOG.error("Failed to add function " + 
createFunctionDesc.getFunctionName() +
-              " to the metastore.", e);
-      return 1;
-    }
-    return 0;
-  }
-
-  private int createTemporaryFunction(CreateFunctionDesc createFunctionDesc) {
-    try {
-      // Add any required resources
-      FunctionResource[] resources = toFunctionResource(createFunctionDesc.getResources());
-      addFunctionResources(resources);
-
-      Class<?> udfClass = getUdfClass(createFunctionDesc);
-      FunctionInfo registered = FunctionRegistry.registerTemporaryUDF(
-          createFunctionDesc.getFunctionName(), udfClass, resources);
-      if (registered != null) {
-        return 0;
-      }
-      console.printError("FAILED: Class " + createFunctionDesc.getClassName()
-          + " does not implement UDF, GenericUDF, or UDAF");
-      return 1;
-    } catch (HiveException e) {
-      console.printError("FAILED: " + e.toString());
-      LOG.info("create function: ", e);
-      return 1;
-    } catch (ClassNotFoundException e) {
-
-      console.printError("FAILED: Class " + createFunctionDesc.getClassName() 
+ " not found");
-      LOG.info("create function: ", e);
-      return 1;
-    }
-  }
-
-  private int createMacro(CreateMacroDesc createMacroDesc) {
-    FunctionRegistry.registerTemporaryMacro(
-      createMacroDesc.getMacroName(),
-      createMacroDesc.getBody(),
-      createMacroDesc.getColNames(),
-      createMacroDesc.getColTypes()
-    );
-    return 0;
-  }
-
-  private int dropMacro(DropMacroDesc dropMacroDesc) {
-    try {
-      FunctionRegistry.unregisterTemporaryUDF(dropMacroDesc.getMacroName());
-      return 0;
-    } catch (HiveException e) {
-      LOG.info("drop macro: ", e);
-      return 1;
-    }
-  }
-
-  // todo authorization
-  private int dropPermanentFunction(Hive db, DropFunctionDesc dropFunctionDesc) {
-    try {
-      String[] qualifiedNameParts = FunctionUtils.getQualifiedFunctionNameParts(
-          dropFunctionDesc.getFunctionName());
-      String dbName = qualifiedNameParts[0];
-      String funcName = qualifiedNameParts[1];
-
-      String registeredName = FunctionUtils.qualifyFunctionName(funcName, dbName);
-      FunctionRegistry.unregisterPermanentFunction(registeredName);
-      db.dropFunction(dbName, funcName);
-
-      return 0;
-    } catch (Exception e) {
-      // For repl load flow, function may not exist for first incremental phase. So, just return success.
-      if (dropFunctionDesc.getReplicationSpec().isInReplicationScope()
-              && (e.getCause() instanceof NoSuchObjectException)) {
-        LOG.info("Drop function is idempotent as function: "
-                + dropFunctionDesc.getFunctionName() + " doesn't exist.");
-        return 0;
-      }
-      LOG.info("drop function: ", e);
-      console.printError("FAILED: error during drop function: " + 
StringUtils.stringifyException(e));
-      return 1;
-    }
-  }
-
-  private int dropTemporaryFunction(DropFunctionDesc dropFunctionDesc) {
-    try {
-      FunctionRegistry.unregisterTemporaryUDF(dropFunctionDesc.getFunctionName());
-      return 0;
-    } catch (HiveException e) {
-      LOG.info("drop function: ", e);
-      return 1;
-    }
-  }
-
-  private void checkLocalFunctionResources(Hive db, List<ResourceUri> resources)
-      throws HiveException {
-    // If this is a non-local warehouse, then adding resources from the local filesystem
-    // may mean that other clients will not be able to access the resources.
-    // So disallow resources from local filesystem in this case.
-    if (resources != null && resources.size() > 0) {
-      try {
-        String localFsScheme = FileSystem.getLocal(db.getConf()).getUri().getScheme();
-        String configuredFsScheme = FileSystem.get(db.getConf()).getUri().getScheme();
-        if (configuredFsScheme.equals(localFsScheme)) {
-          // Configured warehouse FS is local, don't need to bother checking.
-          return;
-        }
-
-        for (ResourceUri res : resources) {
-          String resUri = res.getUri();
-          if (ResourceDownloader.isFileUri(resUri)) {
-            throw new HiveException("Hive warehouse is non-local, but "
-                + res.getUri() + " specifies file on local filesystem. "
-                + "Resources on non-local warehouse should specify a non-local 
scheme/path");
-          }
-        }
-      } catch (HiveException e) {
-        throw e;
-      } catch (Exception e) {
-        LOG.error("Exception caught in checkLocalFunctionResources", e);
-        throw new HiveException(e);
-      }
-    }
-  }
-
-  public static FunctionResource[] toFunctionResource(List<ResourceUri> 
resources)
-      throws HiveException {
-    if (resources == null) {
-      return null;
-    }
-    FunctionResource[] converted = new FunctionResource[resources.size()];
-    for (int i = 0; i < converted.length; i++) {
-      ResourceUri resource = resources.get(i);
-      SessionState.ResourceType type = getResourceType(resource.getResourceType());
-      converted[i] = new FunctionResource(type, resource.getUri());
-    }
-    return converted;
-  }
-
-  public static SessionState.ResourceType getResourceType(ResourceType rt) {
-    switch (rt) {
-      case JAR:
-        return SessionState.ResourceType.JAR;
-      case FILE:
-        return SessionState.ResourceType.FILE;
-      case ARCHIVE:
-        return SessionState.ResourceType.ARCHIVE;
-      default:
-        throw new AssertionError("Unexpected resource type " + rt);
-    }
-  }
-
-  public static void addFunctionResources(FunctionResource[] resources) throws HiveException {
-    if (resources != null) {
-      Multimap<SessionState.ResourceType, String> mappings = 
HashMultimap.create();
-      for (FunctionResource res : resources) {
-        mappings.put(res.getResourceType(), res.getResourceURI());
-      }
-      for (SessionState.ResourceType type : mappings.keys()) {
-        SessionState.get().add_resources(type, mappings.get(type));
-      }
-    }
-  }
-
-  private Class<?> getUdfClass(CreateFunctionDesc desc) throws 
ClassNotFoundException {
-    // get the session specified class loader from SessionState
-    ClassLoader classLoader = Utilities.getSessionSpecifiedClassLoader();
-    return Class.forName(desc.getClassName(), true, classLoader);
-  }
-
-  @Override
-  public StageType getType() {
-    return StageType.FUNC;
-  }
-
-  @Override
-  public String getName() {
-    return "FUNCTION";
-  }
-
-  /**
-   * this needs access to session state resource downloads which in turn uses references to Registry objects.
-   */
-  @Override
-  public boolean canExecuteInParallel() {
-    return false;
-  }
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionUtils.java
index 200e26c..d63d317 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionUtils.java
@@ -18,6 +18,11 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
+import java.util.List;
+
+import org.apache.hadoop.hive.metastore.api.ResourceType;
+import org.apache.hadoop.hive.metastore.api.ResourceUri;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
@@ -25,7 +30,55 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDF;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDTF;
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver;
 
-public class FunctionUtils {
+import com.google.common.collect.HashMultimap;
+import com.google.common.collect.Multimap;
+
+/**
+ * Function related utilities.
+ */
+public final class FunctionUtils {
+  private FunctionUtils() {
+    throw new UnsupportedOperationException("FunctionUtils should not be 
instantiated");
+  }
+
+  public static FunctionResource[] toFunctionResource(List<ResourceUri> 
resources) throws HiveException {
+    if (resources == null) {
+      return null;
+    }
+
+    FunctionResource[] converted = new FunctionResource[resources.size()];
+    for (int i = 0; i < converted.length; i++) {
+      ResourceUri resource = resources.get(i);
+      SessionState.ResourceType type = getResourceType(resource.getResourceType());
+      converted[i] = new FunctionResource(type, resource.getUri());
+    }
+    return converted;
+  }
+
+  public static void addFunctionResources(FunctionResource[] resources) throws HiveException {
+    if (resources != null) {
+      Multimap<SessionState.ResourceType, String> mappings = HashMultimap.create();
+      for (FunctionResource res : resources) {
+        mappings.put(res.getResourceType(), res.getResourceURI());
+      }
+      for (SessionState.ResourceType type : mappings.keys()) {
+        SessionState.get().add_resources(type, mappings.get(type));
+      }
+    }
+  }
+
+  public static SessionState.ResourceType getResourceType(ResourceType rt) {
+    switch (rt) {
+    case JAR:
+      return SessionState.ResourceType.JAR;
+    case FILE:
+      return SessionState.ResourceType.FILE;
+    case ARCHIVE:
+      return SessionState.ResourceType.ARCHIVE;
+    default:
+      throw new AssertionError("Unexpected resource type " + rt);
+    }
+  }
 
   public static boolean isQualifiedFunctionName(String functionName) {
     return functionName.indexOf('.') >= 0;
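
These helpers moved verbatim from FunctionTask, so callers such as FunctionLocalizer and Registry keep the same behavior: metastore-level ResourceUris are converted to session-level FunctionResources and then registered with the active session (for jars this adds them to the session class path). A short usage sketch with an invented URI, assuming an active SessionState:

    import java.util.Arrays;
    import java.util.List;

    import org.apache.hadoop.hive.metastore.api.ResourceType;
    import org.apache.hadoop.hive.metastore.api.ResourceUri;
    import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
    import org.apache.hadoop.hive.ql.exec.FunctionUtils;
    import org.apache.hadoop.hive.ql.metadata.HiveException;

    class FunctionResourceExample {
      static void localize() throws HiveException {
        // Metastore-level resource description (the URI is made up).
        List<ResourceUri> uris =
            Arrays.asList(new ResourceUri(ResourceType.JAR, "hdfs:///udfs/my-udf.jar"));

        // JAR/FILE/ARCHIVE map 1:1 onto SessionState.ResourceType; anything else
        // is an AssertionError, as getResourceType above shows.
        FunctionResource[] resources = FunctionUtils.toFunctionResource(uris);

        // Groups the resources by type and adds them to the current session.
        FunctionUtils.addFunctionResources(resources);
      }
    }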
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java
index f4a46e6..fc2a0e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/Registry.java
@@ -661,7 +661,7 @@ public class Registry {
       // At this point we should add any relevant jars that would be needed for the UDf.
       FunctionResource[] resources = function.getResources();
       try {
-        FunctionTask.addFunctionResources(resources);
+        FunctionUtils.addFunctionResources(resources);
       } catch (Exception e) {
         LOG.error("Unable to load resources for " + qualifiedName + ":" + e, 
e);
         return null;
@@ -786,7 +786,7 @@ public class Registry {
       }
       // Found UDF in metastore - now add it to the function registry.
       FunctionInfo fi = registerPermanentFunction(functionName, func.getClassName(), true,
-          FunctionTask.toFunctionResource(func.getResourceUris()));
+          FunctionUtils.toFunctionResource(func.getResourceUris()));
       if (fi == null) {
         LOG.error(func.getClassName() + " is not a valid UDF class and was not registered");
         return null;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
index 7eeca5f..7025b4a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/TaskFactory.java
@@ -46,7 +46,6 @@ import org.apache.hadoop.hive.ql.plan.ExplainSQRewriteWork;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 import org.apache.hadoop.hive.ql.plan.ExportWork;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
-import org.apache.hadoop.hive.ql.plan.FunctionWork;
 import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
 import org.apache.hadoop.hive.ql.plan.MoveWork;
@@ -92,8 +91,6 @@ public final class TaskFactory {
     taskvec.add(new TaskTuple<MaterializedViewDesc>(
         MaterializedViewDesc.class,
         MaterializedViewTask.class));
-    taskvec.add(new TaskTuple<FunctionWork>(FunctionWork.class,
-        FunctionTask.class));
     taskvec
         .add(new TaskTuple<ExplainWork>(ExplainWork.class, ExplainTask.class));
     taskvec
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 2ae1db5..691f3ee 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -116,7 +116,6 @@ import org.apache.hadoop.hive.ql.ddl.table.partition.AlterTableAddPartitionDesc;
 import org.apache.hadoop.hive.ql.ddl.table.partition.AlterTableDropPartitionDesc;
 import org.apache.hadoop.hive.ql.exec.AbstractFileMergeOperator;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
-import org.apache.hadoop.hive.ql.exec.FunctionTask;
 import org.apache.hadoop.hive.ql.exec.FunctionUtils;
 import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -262,7 +261,7 @@ public class Hive {
         LOG.info("Registering function " + functionName + " " + 
function.getClassName());
         String qualFunc = FunctionUtils.qualifyFunctionName(functionName, 
function.getDbName());
         FunctionRegistry.registerPermanentFunction(qualFunc, 
function.getClassName(), false,
-                    
FunctionTask.toFunctionResource(function.getResourceUris()));
+                    
FunctionUtils.toFunctionResource(function.getResourceUris()));
         registryFunctions.remove(qualFunc);
       } catch (Exception e) {
         LOG.warn("Failed to register persistent function " +
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/QueryPlanPostProcessor.java b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/QueryPlanPostProcessor.java
index 74a4be4..51a5ca0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/optimizer/QueryPlanPostProcessor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/optimizer/QueryPlanPostProcessor.java
@@ -40,7 +40,6 @@ import org.apache.hadoop.hive.ql.plan.ExplainSQRewriteWork;
 import org.apache.hadoop.hive.ql.plan.ExplainWork;
 import org.apache.hadoop.hive.ql.plan.FetchWork;
 import org.apache.hadoop.hive.ql.plan.FileSinkDesc;
-import org.apache.hadoop.hive.ql.plan.FunctionWork;
 import org.apache.hadoop.hive.ql.plan.MapredLocalWork;
 import org.apache.hadoop.hive.ql.plan.MapredWork;
 import org.apache.hadoop.hive.ql.plan.MoveWork;
@@ -112,7 +111,6 @@ public class QueryPlanPostProcessor {
           work instanceof DependencyCollectionWork ||
           work instanceof ExplainSQRewriteWork ||
           work instanceof FetchWork ||
-          work instanceof FunctionWork ||
           work instanceof MoveWork ||
           work instanceof BasicStatsNoJobWork ||
           work instanceof StatsWork) {
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
index 2cfcc6b..7f167a2 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/FunctionSemanticAnalyzer.java
@@ -29,6 +29,10 @@ import org.apache.hadoop.hive.metastore.api.ResourceType;
 import org.apache.hadoop.hive.metastore.api.ResourceUri;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.function.CreateFunctionDesc;
+import org.apache.hadoop.hive.ql.ddl.function.DropFunctionDesc;
+import org.apache.hadoop.hive.ql.ddl.function.ReloadFunctionsDesc;
 import org.apache.hadoop.hive.ql.exec.FunctionInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.FunctionUtils;
@@ -36,10 +40,6 @@ import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.hooks.Entity.Type;
 import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
-import org.apache.hadoop.hive.ql.plan.ReloadFunctionDesc;
-import org.apache.hadoop.hive.ql.plan.DropFunctionDesc;
-import org.apache.hadoop.hive.ql.plan.FunctionWork;
 import org.apache.hadoop.hive.ql.plan.PlanUtils;
 
 /**
@@ -60,8 +60,8 @@ public class FunctionSemanticAnalyzer extends BaseSemanticAnalyzer {
       analyzeCreateFunction(ast);
     } else if (ast.getType() == HiveParser.TOK_DROPFUNCTION) {
       analyzeDropFunction(ast);
-    } else if (ast.getType() == HiveParser.TOK_RELOADFUNCTION) {
-      rootTasks.add(TaskFactory.get(new FunctionWork(new ReloadFunctionDesc())));
+    } else if (ast.getType() == HiveParser.TOK_RELOADFUNCTIONS) {
+      rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), new ReloadFunctionsDesc())));
     }
 
     LOG.info("analyze done");
@@ -85,8 +85,8 @@ public class FunctionSemanticAnalyzer extends BaseSemanticAnalyzer {
     }
 
     CreateFunctionDesc desc =
-        new CreateFunctionDesc(functionName, isTemporaryFunction, className, resources, null);
-    rootTasks.add(TaskFactory.get(new FunctionWork(desc)));
+        new CreateFunctionDesc(functionName, className, isTemporaryFunction, resources, null);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
 
     addEntities(functionName, className, isTemporaryFunction, resources);
   }
@@ -114,7 +114,7 @@ public class FunctionSemanticAnalyzer extends BaseSemanticAnalyzer {
 
     boolean isTemporaryFunction = (ast.getFirstChildWithType(HiveParser.TOK_TEMPORARY) != null);
     DropFunctionDesc desc = new DropFunctionDesc(functionName, isTemporaryFunction, null);
-    rootTasks.add(TaskFactory.get(new FunctionWork(desc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
 
     addEntities(functionName, info.getClassName(), isTemporaryFunction, null);
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
index 223bbd4..a4b233a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
@@ -247,7 +247,7 @@ TOK_STRINGLITERALSEQUENCE;
 TOK_CHARSETLITERAL;
 TOK_CREATEFUNCTION;
 TOK_DROPFUNCTION;
-TOK_RELOADFUNCTION;
+TOK_RELOADFUNCTIONS;
 TOK_CREATEMACRO;
 TOK_DROPMACRO;
 TOK_TEMPORARY;
@@ -569,6 +569,7 @@ import org.apache.hadoop.hive.conf.HiveConf;
     xlateMap.put("KW_REGEXP", "REGEXP");
     xlateMap.put("KW_TEMPORARY", "TEMPORARY");
     xlateMap.put("KW_FUNCTION", "FUNCTION");
+    xlateMap.put("KW_FUNCTIONS", "FUNCTIONS");
     xlateMap.put("KW_EXPLAIN", "EXPLAIN");
     xlateMap.put("KW_EXTENDED", "EXTENDED");
     xlateMap.put("KW_DEBUG", "DEBUG");
@@ -987,7 +988,7 @@ ddlStatement
     | createFunctionStatement
     | createMacroStatement
     | dropFunctionStatement
-    | reloadFunctionStatement
+    | reloadFunctionsStatement
     | dropMacroStatement
     | analyzeStatement
     | lockStatement
@@ -1944,10 +1945,10 @@ dropFunctionStatement
     ->                  ^(TOK_DROPFUNCTION functionIdentifier ifExists?)
     ;
 
-reloadFunctionStatement
-@init { pushMsg("reload function statement", state); }
+reloadFunctionsStatement
+@init { pushMsg("reload functions statement", state); }
 @after { popMsg(state); }
-    : KW_RELOAD KW_FUNCTION -> ^(TOK_RELOADFUNCTION);
+    : KW_RELOAD (KW_FUNCTIONS|KW_FUNCTION) -> ^(TOK_RELOADFUNCTIONS);
 
 createMacroStatement
 @init { pushMsg("create macro statement", state); }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
index 88b6068..857a5af 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/MacroSemanticAnalyzer.java
@@ -37,6 +37,9 @@ import org.apache.hadoop.hive.metastore.api.Database;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryState;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.function.CreateMacroDesc;
+import org.apache.hadoop.hive.ql.ddl.function.DropMacroDesc;
 import org.apache.hadoop.hive.ql.exec.ColumnInfo;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.FunctionUtils;
@@ -45,10 +48,7 @@ import org.apache.hadoop.hive.ql.hooks.WriteEntity;
 import org.apache.hadoop.hive.ql.lib.Dispatcher;
 import org.apache.hadoop.hive.ql.lib.Node;
 import org.apache.hadoop.hive.ql.lib.PreOrderWalker;
-import org.apache.hadoop.hive.ql.plan.CreateMacroDesc;
-import org.apache.hadoop.hive.ql.plan.DropMacroDesc;
 import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
-import org.apache.hadoop.hive.ql.plan.FunctionWork;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
 
@@ -76,7 +76,6 @@ public class MacroSemanticAnalyzer extends BaseSemanticAnalyzer {
     }
   }
 
-  @SuppressWarnings("unchecked")
   private void analyzeCreateMacro(ASTNode ast) throws SemanticException {
     String functionName = ast.getChild(0).getText();
 
@@ -140,12 +139,11 @@ public class MacroSemanticAnalyzer extends BaseSemanticAnalyzer {
         body = sa.genExprNodeDesc((ASTNode)ast.getChild(2), rowResolver);
     }
     CreateMacroDesc desc = new CreateMacroDesc(functionName, macroColNames, macroColTypes, body);
-    rootTasks.add(TaskFactory.get(new FunctionWork(desc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
 
     addEntities();
   }
 
-  @SuppressWarnings("unchecked")
   private void analyzeDropMacro(ASTNode ast) throws SemanticException {
     String functionName = ast.getChild(0).getText();
     boolean ifExists = (ast.getFirstChildWithType(TOK_IFEXISTS) != null);
@@ -164,7 +162,7 @@ public class MacroSemanticAnalyzer extends BaseSemanticAnalyzer {
     }
 
     DropMacroDesc desc = new DropMacroDesc(functionName);
-    rootTasks.add(TaskFactory.get(new FunctionWork(desc)));
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
 
     addEntities();
   }
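[Similarly, a hedged sketch of building a Create Macro task through the DDL framework; the constant body below stands in for the ExprNodeDesc the analyzer derives from the AST, and the entity sets are left empty:]

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.List;

    import org.apache.hadoop.hive.ql.ddl.DDLWork;
    import org.apache.hadoop.hive.ql.ddl.function.CreateMacroDesc;
    import org.apache.hadoop.hive.ql.exec.Task;
    import org.apache.hadoop.hive.ql.exec.TaskFactory;
    import org.apache.hadoop.hive.ql.hooks.ReadEntity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;
    import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
    import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
    import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

    public class CreateMacroTaskSketch {
      public static Task<DDLWork> buildTask() {
        List<String> colNames = Arrays.asList("x");
        List<TypeInfo> colTypes = Arrays.<TypeInfo>asList(TypeInfoFactory.doubleTypeInfo);
        // A constant stands in for the parsed macro body expression.
        ExprNodeDesc body = new ExprNodeConstantDesc(1.0);
        CreateMacroDesc desc = new CreateMacroDesc("SIGMOID", colNames, colTypes, body);
        return TaskFactory.get(
            new DDLWork(new HashSet<ReadEntity>(), new HashSet<WriteEntity>(), desc));
      }
    }
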
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
index 51a6b2a..f655ae9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
@@ -83,7 +83,7 @@ public final class SemanticAnalyzerFactory {
     commandType.put(HiveParser.TOK_SHOWMATERIALIZEDVIEWS, HiveOperation.SHOWMATERIALIZEDVIEWS);
     commandType.put(HiveParser.TOK_CREATEFUNCTION, HiveOperation.CREATEFUNCTION);
     commandType.put(HiveParser.TOK_DROPFUNCTION, HiveOperation.DROPFUNCTION);
-    commandType.put(HiveParser.TOK_RELOADFUNCTION, HiveOperation.RELOADFUNCTION);
+    commandType.put(HiveParser.TOK_RELOADFUNCTIONS, HiveOperation.RELOADFUNCTION);
     commandType.put(HiveParser.TOK_CREATEMACRO, HiveOperation.CREATEMACRO);
     commandType.put(HiveParser.TOK_DROPMACRO, HiveOperation.DROPMACRO);
     commandType.put(HiveParser.TOK_CREATEVIEW, HiveOperation.CREATEVIEW);
@@ -356,7 +356,7 @@ public final class SemanticAnalyzerFactory {
 
       case HiveParser.TOK_CREATEFUNCTION:
       case HiveParser.TOK_DROPFUNCTION:
-      case HiveParser.TOK_RELOADFUNCTION:
+      case HiveParser.TOK_RELOADFUNCTIONS:
         return new FunctionSemanticAnalyzer(queryState);
 
       case HiveParser.TOK_ANALYZE:
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
index 3a32885..c5a0519 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/CreateFunctionHandler.java
@@ -27,6 +27,8 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.ReplChangeManager;
 import org.apache.hadoop.hive.metastore.api.ResourceUri;
 import org.apache.hadoop.hive.ql.ErrorMsg;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.function.CreateFunctionDesc;
 import org.apache.hadoop.hive.ql.exec.FunctionUtils;
 import org.apache.hadoop.hive.ql.exec.ReplCopyTask;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -36,9 +38,7 @@ import org.apache.hadoop.hive.ql.parse.ReplicationSpec;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
 import org.apache.hadoop.hive.ql.parse.repl.PathBuilder;
 import org.apache.hadoop.hive.ql.parse.repl.load.MetaData;
-import org.apache.hadoop.hive.ql.plan.CreateFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.DependencyCollectionWork;
-import org.apache.hadoop.hive.ql.plan.FunctionWork;
 
 import java.io.IOException;
 import java.io.Serializable;
@@ -64,10 +64,10 @@ public class CreateFunctionHandler extends AbstractMessageHandler {
       this.functionName = builder.metadata.function.getFunctionName();
 
       context.log.debug("Loading function desc : {}", descToLoad.toString());
-      Task<FunctionWork> createTask = TaskFactory.get(
-          new FunctionWork(descToLoad), context.hiveConf);
+      Task<DDLWork> createTask = TaskFactory.get(
+          new DDLWork(readEntitySet, writeEntitySet, descToLoad), context.hiveConf);
       context.log.debug("Added create function task : {}:{},{}", createTask.getId(),
-          descToLoad.getFunctionName(), descToLoad.getClassName());
+          descToLoad.getName(), descToLoad.getClassName());
       // This null check is specifically done as the same class is used to handle both incremental and
       // bootstrap replication scenarios for create function. When doing bootstrap we do not have
       // event id for this event but rather when bootstrap started and hence we pass in null dmd for
@@ -140,7 +140,7 @@ public class CreateFunctionHandler extends AbstractMessageHandler {
       // Only for incremental load, need to validate if event is newer than the database.
       ReplicationSpec replSpec = (context.dmd == null) ? null : context.eventOnlyReplicationSpec();
       return new CreateFunctionDesc(
-              fullQualifiedFunctionName, false, metadata.function.getClassName(),
+              fullQualifiedFunctionName, metadata.function.getClassName(), false,
               transformedUris, replSpec
       );
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java
index fee2bb5..11203f1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/repl/load/message/DropFunctionHandler.java
@@ -18,12 +18,12 @@
 package org.apache.hadoop.hive.ql.parse.repl.load.message;
 
 import org.apache.hadoop.hive.metastore.messaging.DropFunctionMessage;
+import org.apache.hadoop.hive.ql.ddl.DDLWork;
+import org.apache.hadoop.hive.ql.ddl.function.DropFunctionDesc;
 import org.apache.hadoop.hive.ql.exec.FunctionUtils;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskFactory;
 import org.apache.hadoop.hive.ql.parse.SemanticException;
-import org.apache.hadoop.hive.ql.plan.DropFunctionDesc;
-import org.apache.hadoop.hive.ql.plan.FunctionWork;
 
 import java.io.Serializable;
 import java.util.Collections;
@@ -39,9 +39,10 @@ public class DropFunctionHandler extends AbstractMessageHandler {
         FunctionUtils.qualifyFunctionName(msg.getFunctionName(), actualDbName);
     DropFunctionDesc desc = new DropFunctionDesc(
             qualifiedFunctionName, false, context.eventOnlyReplicationSpec());
-    Task<FunctionWork> dropFunctionTask = TaskFactory.get(new FunctionWork(desc), context.hiveConf);
+    Task<DDLWork> dropFunctionTask =
+        TaskFactory.get(new DDLWork(readEntitySet, writeEntitySet, desc), context.hiveConf);
     context.log.debug(
-        "Added drop function task : {}:{}", dropFunctionTask.getId(), 
desc.getFunctionName()
+        "Added drop function task : {}:{}", dropFunctionTask.getId(), 
desc.getName()
     );
     updatedMetadata.set(context.dmd.getEventTo().toString(), actualDbName, null, null);
     return Collections.singletonList(dropFunctionTask);
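[And the drop-side equivalent, sketched under the same assumptions (empty entity sets, placeholder function name):]

    import java.util.HashSet;

    import org.apache.hadoop.hive.ql.ddl.DDLWork;
    import org.apache.hadoop.hive.ql.ddl.function.DropFunctionDesc;
    import org.apache.hadoop.hive.ql.exec.Task;
    import org.apache.hadoop.hive.ql.exec.TaskFactory;
    import org.apache.hadoop.hive.ql.hooks.ReadEntity;
    import org.apache.hadoop.hive.ql.hooks.WriteEntity;

    public class DropFunctionTaskSketch {
      public static Task<DDLWork> buildTask() {
        // Arguments: name, isTemporary, replication spec (null outside replication).
        DropFunctionDesc desc = new DropFunctionDesc("mydb.func1", false, null);
        return TaskFactory.get(
            new DDLWork(new HashSet<ReadEntity>(), new HashSet<WriteEntity>(), desc));
      }
    }
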
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateMacroDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateMacroDesc.java
deleted file mode 100644
index f34c02c..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateMacroDesc.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.plan;
-
-import java.io.Serializable;
-import java.util.List;
-
-import org.apache.hadoop.hive.ql.plan.Explain.Level;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-
-/**
- * CreateMacroDesc.
- *
- */
-@Explain(displayName = "Create Macro", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
-public class CreateMacroDesc implements Serializable {
-  private static final long serialVersionUID = 1L;
-
-  private String macroName;
-  private List<String> colNames;
-  private List<TypeInfo> colTypes;
-  private ExprNodeDesc body;
-
-  /**
-   * For serialization only.
-   */
-  public CreateMacroDesc() {
-  }
-
-  public CreateMacroDesc(String macroName,
-                         List<String> colNames,
-                         List<TypeInfo> colTypes,
-                         ExprNodeDesc body) {
-    this.macroName = macroName;
-    this.colNames = colNames;
-    this.colTypes = colTypes;
-    this.body = body;
-  }
-
-  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, 
Level.EXTENDED })
-  public String getMacroName() {
-    return macroName;
-  }
-
-  public ExprNodeDesc getBody() {
-    return body;
-  }
-
-  public List<String> getColNames() {
-    return colNames;
-  }
-
-  public List<TypeInfo> getColTypes() {
-    return colTypes;
-  }
-
-}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/FunctionWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/FunctionWork.java
deleted file mode 100644
index cacbe62..0000000
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/FunctionWork.java
+++ /dev/null
@@ -1,93 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hive.ql.plan;
-
-import java.io.Serializable;
-
-/**
- * FunctionWork.
- *
- */
-public class FunctionWork implements Serializable {
-  private static final long serialVersionUID = 1L;
-  private CreateFunctionDesc createFunctionDesc;
-  private DropFunctionDesc dropFunctionDesc;
-  private ReloadFunctionDesc reloadFunctionDesc;
-  private CreateMacroDesc createMacroDesc;
-  private DropMacroDesc dropMacroDesc;
-
-  /**
-   * For serialization only.
-   */
-  public FunctionWork() {
-  }
-  
-  public FunctionWork(CreateFunctionDesc createFunctionDesc) {
-    this.createFunctionDesc = createFunctionDesc;
-  }
-
-  public FunctionWork(DropFunctionDesc dropFunctionDesc) {
-    this.dropFunctionDesc = dropFunctionDesc;
-  }
-
-  public FunctionWork(ReloadFunctionDesc reloadFunctionDesc) {
-    this.reloadFunctionDesc = reloadFunctionDesc;
-  }
-
-  public FunctionWork(CreateMacroDesc createMacroDesc) {
-    this.createMacroDesc = createMacroDesc;
-  }
-
-  public FunctionWork(DropMacroDesc dropMacroDesc) {
-    this.dropMacroDesc = dropMacroDesc;
-  }
-
-  public CreateFunctionDesc getCreateFunctionDesc() {
-    return createFunctionDesc;
-  }
-
-  public void setCreateFunctionDesc(CreateFunctionDesc createFunctionDesc) {
-    this.createFunctionDesc = createFunctionDesc;
-  }
-
-  public DropFunctionDesc getDropFunctionDesc() {
-    return dropFunctionDesc;
-  }
-
-  public void setDropFunctionDesc(DropFunctionDesc dropFunctionDesc) {
-    this.dropFunctionDesc = dropFunctionDesc;
-  }
-
-  public ReloadFunctionDesc getReloadFunctionDesc() {
-    return reloadFunctionDesc;
-  }
-
-  public void setReloadFunctionDesc(ReloadFunctionDesc reloadFunctionDesc) {
-    this.reloadFunctionDesc = reloadFunctionDesc;
-  }
-
-  public CreateMacroDesc getCreateMacroDesc() {
-    return createMacroDesc;
-  }
-
-  public DropMacroDesc getDropMacroDesc() {
-    return dropMacroDesc;
-  }
-
-}
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
index 2189ad6..e29e778 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/parse/TestMacroSemanticAnalyzer.java
@@ -22,6 +22,7 @@ import java.util.List;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.Context;
+import org.apache.hadoop.hive.ql.DriverContext;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -56,6 +57,8 @@ public class TestMacroSemanticAnalyzer {
     List<Task<?>> rootTasks = analyzer.getRootTasks();
     Assert.assertEquals(1, rootTasks.size());
     for (Task<?> task : rootTasks) {
+      task.setDriverContext(new DriverContext(context));
+      task.setConf(conf);
       Assert.assertEquals(0, task.executeTask(null));
     }
   }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestCreateMacroDesc.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestCreateMacroDesc.java
index 48906f7..47849a1 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestCreateMacroDesc.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestCreateMacroDesc.java
@@ -21,7 +21,7 @@ import java.util.ArrayList;
 import java.util.List;
 
 import org.junit.Assert;
-
+import org.apache.hadoop.hive.ql.ddl.function.CreateMacroDesc;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
 import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
 import org.junit.Before;
@@ -46,9 +46,9 @@ public class TestCreateMacroDesc {
   @Test
   public void testCreateMacroDesc() throws Exception {
     CreateMacroDesc desc = new CreateMacroDesc(name, colNames, colTypes, bodyDesc);
-    Assert.assertEquals(name, desc.getMacroName());
+    Assert.assertEquals(name, desc.getName());
     Assert.assertEquals(bodyDesc, desc.getBody());
-    Assert.assertEquals(colNames, desc.getColNames());
-    Assert.assertEquals(colTypes, desc.getColTypes());
+    Assert.assertEquals(colNames, desc.getColumnNames());
+    Assert.assertEquals(colTypes, desc.getColumnTypes());
   }
 }
diff --git a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
index 2ee27dc..5d553b4 100644
--- a/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
+++ b/ql/src/test/org/apache/hadoop/hive/ql/plan/TestDropMacroDesc.java
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.plan;
 
+import org.apache.hadoop.hive.ql.ddl.function.DropMacroDesc;
 import org.junit.Assert;
 
 import org.junit.Before;
@@ -31,6 +32,6 @@ public class TestDropMacroDesc {
   @Test
   public void testCreateMacroDesc() throws Exception {
     DropMacroDesc desc = new DropMacroDesc(name);
-    Assert.assertEquals(name, desc.getMacroName());
+    Assert.assertEquals(name, desc.getName());
   }
 }
diff --git a/ql/src/test/queries/clientpositive/create_func1.q b/ql/src/test/queries/clientpositive/create_func1.q
index 507fbef..2c6acfc 100644
--- a/ql/src/test/queries/clientpositive/create_func1.q
+++ b/ql/src/test/queries/clientpositive/create_func1.q
@@ -6,6 +6,7 @@ select qtest_get_java_boolean('true'), qtest_get_java_boolean('false') from src
 describe function extended qtest_get_java_boolean;
 
 create database mydb;
+explain create function mydb.func1 as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper';
 create function mydb.func1 as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper';
 
 show functions like mydb.func1;
@@ -15,11 +16,25 @@ describe function extended mydb.func1;
 
 select mydb.func1('abc') from src limit 1;
 
+explain drop function mydb.func1;
 drop function mydb.func1;
 
 -- function should now be gone
 show functions like mydb.func1;
 
+explain create temporary function temp_func1 as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper';
+create temporary function temp_func1 as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper';
+
+explain drop temporary function temp_func1;
+drop temporary function temp_func1;
+
+explain reload functions;
+reload functions;
+
+-- old format, still supported due to backward compatibility
+explain reload function;
+reload function;
+
 -- To test function name resolution
 create function mydb.qtest_get_java_boolean as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper';
 
diff --git a/ql/src/test/results/clientnegative/create_function_nonexistent_class.q.out b/ql/src/test/results/clientnegative/create_function_nonexistent_class.q.out
index 55e66f8..49eb393 100644
--- a/ql/src/test/results/clientnegative/create_function_nonexistent_class.q.out
+++ b/ql/src/test/results/clientnegative/create_function_nonexistent_class.q.out
@@ -2,5 +2,5 @@ PREHOOK: query: create function default.badfunc as 'my.nonexistent.class'
 PREHOOK: type: CREATEFUNCTION
 PREHOOK: Output: database:default
 PREHOOK: Output: default.badfunc
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. Unable to load UDF class: java.lang.ClassNotFoundException: my.nonexistent.class
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Unable to load UDF class: java.lang.ClassNotFoundException: my.nonexistent.class
 Please ensure that the JAR file containing this class has been properly installed in the auxiliary directory or was added with ADD JAR command.
diff --git a/ql/src/test/results/clientnegative/create_function_nonudf_class.q.out b/ql/src/test/results/clientnegative/create_function_nonudf_class.q.out
index 6d5427e..d90bc2b 100644
--- a/ql/src/test/results/clientnegative/create_function_nonudf_class.q.out
+++ b/ql/src/test/results/clientnegative/create_function_nonudf_class.q.out
@@ -3,4 +3,4 @@ PREHOOK: type: CREATEFUNCTION
 PREHOOK: Output: database:default
 PREHOOK: Output: default.badfunc
 Failed to register default.badfunc using class java.lang.String
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask
diff --git a/ql/src/test/results/clientnegative/create_unknown_genericudf.q.out b/ql/src/test/results/clientnegative/create_unknown_genericudf.q.out
index ad1371d..7ef0568 100644
--- a/ql/src/test/results/clientnegative/create_unknown_genericudf.q.out
+++ b/ql/src/test/results/clientnegative/create_unknown_genericudf.q.out
@@ -2,4 +2,4 @@ PREHOOK: query: CREATE TEMPORARY FUNCTION dummy_genericudf AS 'org.apache.hadoop
 PREHOOK: type: CREATEFUNCTION
 PREHOOK: Output: dummy_genericudf
 FAILED: Class org.apache.hadoop.hive.ql.udf.generic.DummyGenericUDF not found
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask
diff --git a/ql/src/test/results/clientnegative/create_unknown_udf_udaf.q.out b/ql/src/test/results/clientnegative/create_unknown_udf_udaf.q.out
index bfb72b4..e96e07d 100644
--- a/ql/src/test/results/clientnegative/create_unknown_udf_udaf.q.out
+++ b/ql/src/test/results/clientnegative/create_unknown_udf_udaf.q.out
@@ -2,4 +2,4 @@ PREHOOK: query: CREATE TEMPORARY FUNCTION dummy_function AS 'org.apache.hadoop.h
 PREHOOK: type: CREATEFUNCTION
 PREHOOK: Output: dummy_function
 FAILED: Class org.apache.hadoop.hive.ql.udf.DummyFunction not found
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask
diff --git a/ql/src/test/results/clientnegative/ivyDownload.q.out b/ql/src/test/results/clientnegative/ivyDownload.q.out
index e1fe823..6db994d 100644
--- a/ql/src/test/results/clientnegative/ivyDownload.q.out
+++ b/ql/src/test/results/clientnegative/ivyDownload.q.out
@@ -2,4 +2,4 @@ PREHOOK: query: CREATE TEMPORARY FUNCTION example_add AS 'UDFExampleAdd'
 PREHOOK: type: CREATEFUNCTION
 PREHOOK: Output: example_add
 FAILED: Class UDFExampleAdd not found
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask
diff --git a/ql/src/test/results/clientnegative/udf_function_does_not_implement_udf.q.out b/ql/src/test/results/clientnegative/udf_function_does_not_implement_udf.q.out
index ab42da7..baa34be 100644
--- a/ql/src/test/results/clientnegative/udf_function_does_not_implement_udf.q.out
+++ b/ql/src/test/results/clientnegative/udf_function_does_not_implement_udf.q.out
@@ -2,4 +2,4 @@ PREHOOK: query: CREATE TEMPORARY FUNCTION moo AS 'org.apache.hadoop.hive.ql.Driv
 PREHOOK: type: CREATEFUNCTION
 PREHOOK: Output: moo
 FAILED: Class org.apache.hadoop.hive.ql.Driver does not implement UDF, GenericUDF, or UDAF
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask
diff --git a/ql/src/test/results/clientnegative/udf_local_resource.q.out b/ql/src/test/results/clientnegative/udf_local_resource.q.out
index 62664c9..bfe5492 100644
--- a/ql/src/test/results/clientnegative/udf_local_resource.q.out
+++ b/ql/src/test/results/clientnegative/udf_local_resource.q.out
@@ -3,4 +3,4 @@ PREHOOK: type: CREATEFUNCTION
 PREHOOK: Output: database:default
 PREHOOK: Output: default.lookup
 PREHOOK: Output: hdfs://### HDFS PATH ###
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask. Hive warehouse is non-local, but ../../data/files/sales.txt specifies file on local filesystem. Resources on non-local warehouse should specify a non-local scheme/path
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask. Hive warehouse is non-local, but ../../data/files/sales.txt specifies file on local filesystem. Resources on non-local warehouse should specify a non-local scheme/path
diff --git a/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out b/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
index 4751761..ea62043 100644
--- a/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
+++ b/ql/src/test/results/clientnegative/udf_nonexistent_resource.q.out
@@ -5,4 +5,4 @@ PREHOOK: Output: default.lookup
 #### A masked pattern was here ####
 nonexistent_file.txt does not exist
 Failed to register default.lookup using class org.apache.hadoop.hive.ql.udf.UDFFileLookup
-FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.FunctionTask
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.ddl.DDLTask
diff --git a/ql/src/test/results/clientpositive/create_func1.q.out b/ql/src/test/results/clientpositive/create_func1.q.out
index b1923c7..238d378 100644
--- a/ql/src/test/results/clientpositive/create_func1.q.out
+++ b/ql/src/test/results/clientpositive/create_func1.q.out
@@ -21,6 +21,23 @@ PREHOOK: Output: database:mydb
 POSTHOOK: query: create database mydb
 POSTHOOK: type: CREATEDATABASE
 POSTHOOK: Output: database:mydb
+PREHOOK: query: explain create function mydb.func1 as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper'
+PREHOOK: type: CREATEFUNCTION
+PREHOOK: Output: database:mydb
+PREHOOK: Output: mydb.func1
+POSTHOOK: query: explain create function mydb.func1 as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper'
+POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Output: database:mydb
+POSTHOOK: Output: mydb.func1
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Create Function
+      class: org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper
+      name: mydb.func1
+
 PREHOOK: query: create function mydb.func1 as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper'
 PREHOOK: type: CREATEFUNCTION
 PREHOOK: Output: database:mydb
@@ -54,6 +71,22 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: default@src
 #### A masked pattern was here ####
 ABC
+PREHOOK: query: explain drop function mydb.func1
+PREHOOK: type: DROPFUNCTION
+PREHOOK: Output: database:mydb
+PREHOOK: Output: mydb.func1
+POSTHOOK: query: explain drop function mydb.func1
+POSTHOOK: type: DROPFUNCTION
+POSTHOOK: Output: database:mydb
+POSTHOOK: Output: mydb.func1
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Drop Function
+      name: mydb.func1
+
 PREHOOK: query: drop function mydb.func1
 PREHOOK: type: DROPFUNCTION
 PREHOOK: Output: database:mydb
@@ -66,6 +99,75 @@ PREHOOK: query: show functions like mydb.func1
 PREHOOK: type: SHOWFUNCTIONS
 POSTHOOK: query: show functions like mydb.func1
 POSTHOOK: type: SHOWFUNCTIONS
+PREHOOK: query: explain create temporary function temp_func1 as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper'
+PREHOOK: type: CREATEFUNCTION
+PREHOOK: Output: temp_func1
+POSTHOOK: query: explain create temporary function temp_func1 as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper'
+POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Output: temp_func1
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Create Function
+      class: org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper
+      name: temp_func1
+      temporary: true
+
+PREHOOK: query: create temporary function temp_func1 as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper'
+PREHOOK: type: CREATEFUNCTION
+PREHOOK: Output: temp_func1
+POSTHOOK: query: create temporary function temp_func1 as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper'
+POSTHOOK: type: CREATEFUNCTION
+POSTHOOK: Output: temp_func1
+PREHOOK: query: explain drop temporary function temp_func1
+PREHOOK: type: DROPFUNCTION
+PREHOOK: Output: temp_func1
+POSTHOOK: query: explain drop temporary function temp_func1
+POSTHOOK: type: DROPFUNCTION
+POSTHOOK: Output: temp_func1
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Drop Function
+      name: temp_func1
+      temporary: true
+
+PREHOOK: query: drop temporary function temp_func1
+PREHOOK: type: DROPFUNCTION
+PREHOOK: Output: temp_func1
+POSTHOOK: query: drop temporary function temp_func1
+POSTHOOK: type: DROPFUNCTION
+POSTHOOK: Output: temp_func1
+PREHOOK: query: explain reload functions
+PREHOOK: type: RELOADFUNCTION
+POSTHOOK: query: explain reload functions
+POSTHOOK: type: RELOADFUNCTION
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Reload Functions
+
+PREHOOK: query: explain reload function
+PREHOOK: type: RELOADFUNCTION
+POSTHOOK: query: explain reload function
+POSTHOOK: type: RELOADFUNCTION
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+
+STAGE PLANS:
+  Stage: Stage-0
+    Reload Functions
+
+PREHOOK: query: reload function
+PREHOOK: type: RELOADFUNCTION
+POSTHOOK: query: reload function
+POSTHOOK: type: RELOADFUNCTION
 PREHOOK: query: create function mydb.qtest_get_java_boolean as 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFUpper'
 PREHOOK: type: CREATEFUNCTION
 PREHOOK: Output: database:mydb
diff --git a/ql/src/test/results/clientpositive/create_genericudaf.q.out b/ql/src/test/results/clientpositive/create_genericudaf.q.out
index ca877bf..4792a70 100644
--- a/ql/src/test/results/clientpositive/create_genericudaf.q.out
+++ b/ql/src/test/results/clientpositive/create_genericudaf.q.out
@@ -11,6 +11,10 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
+    Create Function
+      class: org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage
+      name: test_avg
+      temporary: true
 
 PREHOOK: query: CREATE TEMPORARY FUNCTION test_avg AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDAFAverage'
 PREHOOK: type: CREATEFUNCTION
diff --git a/ql/src/test/results/clientpositive/create_genericudf.q.out b/ql/src/test/results/clientpositive/create_genericudf.q.out
index cfe14f5..c9ee1e8 100644
--- a/ql/src/test/results/clientpositive/create_genericudf.q.out
+++ b/ql/src/test/results/clientpositive/create_genericudf.q.out
@@ -11,6 +11,10 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
+    Create Function
+      class: org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestTranslate
+      name: test_translate
+      temporary: true
 
 PREHOOK: query: CREATE TEMPORARY FUNCTION test_translate AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestTranslate'
 PREHOOK: type: CREATEFUNCTION
diff --git a/ql/src/test/results/clientpositive/create_udaf.q.out b/ql/src/test/results/clientpositive/create_udaf.q.out
index 8e20b30..7bfce12 100644
--- a/ql/src/test/results/clientpositive/create_udaf.q.out
+++ b/ql/src/test/results/clientpositive/create_udaf.q.out
@@ -11,6 +11,10 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
+    Create Function
+      class: org.apache.hadoop.hive.ql.udf.UDAFTestMax
+      name: test_max
+      temporary: true
 
 PREHOOK: query: CREATE TEMPORARY FUNCTION test_max AS 'org.apache.hadoop.hive.ql.udf.UDAFTestMax'
 PREHOOK: type: CREATEFUNCTION
diff --git a/ql/src/test/results/clientpositive/drop_udf.q.out b/ql/src/test/results/clientpositive/drop_udf.q.out
index 27dd986..0b63601 100644
--- a/ql/src/test/results/clientpositive/drop_udf.q.out
+++ b/ql/src/test/results/clientpositive/drop_udf.q.out
@@ -17,6 +17,9 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
+    Drop Function
+      name: test_translate
+      temporary: true
 
 PREHOOK: query: DROP TEMPORARY FUNCTION test_translate
 PREHOOK: type: DROPFUNCTION
diff --git a/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out b/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
index 6925f58..52cde21 100644
--- a/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainanalyze_3.q.out
@@ -323,6 +323,7 @@ CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
 POSTHOOK: type: CREATEMACRO
 POSTHOOK: Output: database:default
 Stage-0
+  Create Macro{"body:":"GenericUDFOPDivide(Const decimal(1,0) 1, 
GenericUDFOPPlus(Const decimal(1,0) 1, GenericUDFBridge ==> exp 
(GenericUDFOPNegative(Column[x]))))","column names:":["x"],"column 
types:":["double"],"name:":"SIGMOID"}
 
 PREHOOK: query: CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
 PREHOOK: type: CREATEMACRO
@@ -370,6 +371,7 @@ POSTHOOK: query: explain analyze DROP TEMPORARY MACRO SIGMOID
 POSTHOOK: type: DROPMACRO
 POSTHOOK: Output: database:default
 Stage-0
+  Drop Macro{"name:":"SIGMOID"}
 
 PREHOOK: query: DROP TEMPORARY MACRO SIGMOID
 PREHOOK: type: DROPMACRO
diff --git a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
index 26eae7e..4d58f5e 100644
--- a/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
+++ b/ql/src/test/results/clientpositive/tez/explainuser_3.q.out
@@ -275,6 +275,7 @@ CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
 POSTHOOK: type: CREATEMACRO
 POSTHOOK: Output: database:default
 Stage-0
+  Create Macro{"body:":"GenericUDFOPDivide(Const decimal(1,0) 1, 
GenericUDFOPPlus(Const decimal(1,0) 1, GenericUDFBridge ==> exp 
(GenericUDFOPNegative(Column[x]))))","column names:":["x"],"column 
types:":["double"],"name:":"SIGMOID"}
 
 PREHOOK: query: CREATE TEMPORARY MACRO SIGMOID (x DOUBLE) 1.0 / (1.0 + EXP(-x))
 PREHOOK: type: CREATEMACRO
@@ -308,6 +309,7 @@ POSTHOOK: query: explain DROP TEMPORARY MACRO SIGMOID
 POSTHOOK: type: DROPMACRO
 POSTHOOK: Output: database:default
 Stage-0
+  Drop Macro{"name:":"SIGMOID"}
 
 PREHOOK: query: DROP TEMPORARY MACRO SIGMOID
 PREHOOK: type: DROPMACRO
diff --git a/ql/src/test/results/clientpositive/udf_compare_java_string.q.out b/ql/src/test/results/clientpositive/udf_compare_java_string.q.out
index 75d0124..08b3d7f 100644
--- a/ql/src/test/results/clientpositive/udf_compare_java_string.q.out
+++ b/ql/src/test/results/clientpositive/udf_compare_java_string.q.out
@@ -11,6 +11,10 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
+    Create Function
+      class: org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaString
+      name: test_udf_get_java_string
+      temporary: true
 
 PREHOOK: query: CREATE TEMPORARY FUNCTION test_udf_get_java_string AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaString'
 PREHOOK: type: CREATEFUNCTION
diff --git a/ql/src/test/results/clientpositive/udf_logic_java_boolean.q.out b/ql/src/test/results/clientpositive/udf_logic_java_boolean.q.out
index 0d63db7..352973c 100644
--- a/ql/src/test/results/clientpositive/udf_logic_java_boolean.q.out
+++ b/ql/src/test/results/clientpositive/udf_logic_java_boolean.q.out
@@ -11,6 +11,10 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
+    Create Function
+      class: org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaBoolean
+      name: test_udf_get_java_boolean
+      temporary: true
 
 PREHOOK: query: CREATE TEMPORARY FUNCTION test_udf_get_java_boolean AS 'org.apache.hadoop.hive.ql.udf.generic.GenericUDFTestGetJavaBoolean'
 PREHOOK: type: CREATEFUNCTION
diff --git a/ql/src/test/results/clientpositive/udf_testlength.q.out b/ql/src/test/results/clientpositive/udf_testlength.q.out
index 23a25d0..8f0a69b 100644
--- a/ql/src/test/results/clientpositive/udf_testlength.q.out
+++ b/ql/src/test/results/clientpositive/udf_testlength.q.out
@@ -11,6 +11,10 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
+    Create Function
+      class: org.apache.hadoop.hive.ql.udf.UDFTestLength
+      name: testlength
+      temporary: true
 
 PREHOOK: query: CREATE TEMPORARY FUNCTION testlength AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength'
 PREHOOK: type: CREATEFUNCTION
diff --git a/ql/src/test/results/clientpositive/udf_testlength2.q.out b/ql/src/test/results/clientpositive/udf_testlength2.q.out
index 1a67685..11ebd7a 100644
--- a/ql/src/test/results/clientpositive/udf_testlength2.q.out
+++ b/ql/src/test/results/clientpositive/udf_testlength2.q.out
@@ -11,6 +11,10 @@ STAGE DEPENDENCIES:
 
 STAGE PLANS:
   Stage: Stage-0
+    Create Function
+      class: org.apache.hadoop.hive.ql.udf.UDFTestLength2
+      name: testlength2
+      temporary: true
 
 PREHOOK: query: CREATE TEMPORARY FUNCTION testlength2 AS 'org.apache.hadoop.hive.ql.udf.UDFTestLength2'
 PREHOOK: type: CREATEFUNCTION
