This is an automated email from the ASF dual-hosted git repository.

kgyrtkirk pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 7072e0b  HIVE-21567: Break up DDLTask - extract Function related 
operations (Miklos Gergely via Zoltan Haindrich)
7072e0b is described below

commit 7072e0ba310bfc6ccfbe6bb6ad545dbe2bf6897d
Author: Miklos Gergely <mgerg...@hortonworks.com>
AuthorDate: Mon Apr 8 17:41:39 2019 +0200

    HIVE-21567: Break up DDLTask - extract Function related operations (Miklos 
Gergely via Zoltan Haindrich)
    
    Signed-off-by: Zoltan Haindrich <k...@rxd.hu>
---
 .../hadoop/hive/ql/ddl/DDLOperationContext.java    |   9 +-
 .../org/apache/hadoop/hive/ql/ddl/DDLTask2.java    |   2 +-
 .../{plan => ddl/function}/DescFunctionDesc.java   |  89 ++++---------
 .../ql/ddl/function/DescFunctionOperation.java     | 114 ++++++++++++++++
 .../{plan => ddl/function}/ShowFunctionsDesc.java  |  96 ++++----------
 .../ql/ddl/function/ShowFunctionsOperation.java    |  90 +++++++++++++
 .../hadoop/hive/ql/ddl/function/package-info.java  |  20 +++
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java    | 145 ---------------------
 .../hadoop/hive/ql/parse/DDLSemanticAnalyzer.java  |  17 +--
 .../org/apache/hadoop/hive/ql/plan/DDLWork.java    |  38 ------
 ql/src/test/queries/clientpositive/desc_function.q |   5 +
 .../test/queries/clientpositive/show_functions.q   |   3 +
 .../results/clientpositive/desc_function.q.out     |  53 ++++++++
 .../results/clientpositive/show_functions.q.out    |  56 ++++++++
 14 files changed, 404 insertions(+), 333 deletions(-)

diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLOperationContext.java 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLOperationContext.java
index 14744d1..d5969cd 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLOperationContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLOperationContext.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.hive.ql.metadata.Hive;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatUtils;
 import org.apache.hadoop.hive.ql.metadata.formatting.MetaDataFormatter;
+import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
 
 /**
  * Context for DDL operations.
@@ -39,9 +40,10 @@ public class DDLOperationContext {
   private final DDLWork2 work;
   private final QueryState queryState;
   private final QueryPlan queryPlan;
+  private final LogHelper console;
 
   public DDLOperationContext(HiveConf conf, DriverContext driverContext, 
DDLTask2 task, DDLWork2 work,
-      QueryState queryState, QueryPlan queryPlan) throws HiveException {
+      QueryState queryState, QueryPlan queryPlan, LogHelper console) throws 
HiveException {
     this.db = Hive.get(conf);
     this.conf = conf;
     this.driverContext = driverContext;
@@ -50,6 +52,7 @@ public class DDLOperationContext {
     this.work = work;
     this.queryState = queryState;
     this.queryPlan = queryPlan;
+    this.console = console;
   }
 
   public Hive getDb() {
@@ -83,4 +86,8 @@ public class DDLOperationContext {
   public QueryPlan getQueryPlan() {
     return queryPlan;
   }
+
+  public LogHelper getConsole() {
+    return console;
+  }
 }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLTask2.java 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLTask2.java
index 1f9a0bb..ecc5501 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLTask2.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/DDLTask2.java
@@ -66,7 +66,7 @@ public final class DDLTask2 extends Task<DDLWork2> implements 
Serializable {
 
       if (DESC_TO_OPARATION.containsKey(ddlDesc.getClass())) {
         DDLOperationContext context = new DDLOperationContext(conf, 
driverContext, this, (DDLWork2)work, queryState,
-            queryPlan);
+            queryPlan, console);
         Class<? extends DDLOperation> ddlOpertaionClass = 
DESC_TO_OPARATION.get(ddlDesc.getClass());
         Constructor<? extends DDLOperation> constructor =
             ddlOpertaionClass.getConstructor(DDLOperationContext.class, 
ddlDesc.getClass());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
similarity index 58%
rename from ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java
rename to 
ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
index 9898b15..7f1aa0c 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DescFunctionDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionDesc.java
@@ -16,99 +16,56 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
+package org.apache.hadoop.hive.ql.ddl.function;
 
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.ddl.DDLTask2;
+import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
 /**
- * DescFunctionDesc.
- *
+ * DDL task description for DESC FUNCTION commands.
  */
 @Explain(displayName = "Describe Function", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
-public class DescFunctionDesc extends DDLDesc implements Serializable {
+public class DescFunctionDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
-  String name;
-  String resFile;
-  boolean isExtended;
-
-  public boolean isExtended() {
-    return isExtended;
-  }
 
-  public void setExtended(boolean isExtended) {
-    this.isExtended = isExtended;
+  static {
+    DDLTask2.registerOperation(DescFunctionDesc.class, 
DescFunctionOperation.class);
   }
 
   /**
-   * table name for the result of show tables.
+   * Thrift ddl for the result of show tables.
    */
-  private static final String table = "show";
-  /**
-   * thrift ddl for the result of show tables.
-   */
-  private static final String schema = "tab_name#string";
-
-  public String getTable() {
-    return table;
-  }
-
-  public String getSchema() {
-    return schema;
+  private static final String SCHEMA = "tab_name#string";
+  public static String getSchema() {
+    return SCHEMA;
   }
 
-  public DescFunctionDesc() {
-  }
-
-  /**
-   * @param resFile
-   */
-  public DescFunctionDesc(Path resFile) {
-    this.resFile = resFile.toString();
-    name = null;
-  }
+  private final String resFile;
+  private final String name;
+  private final boolean isExtended;
 
-  /**
-   * @param name
-   *          of the function to describe
-   */
   public DescFunctionDesc(Path resFile, String name, boolean isExtended) {
-    this.isExtended = isExtended;
     this.resFile = resFile.toString();
     this.name = name;
+    this.isExtended = isExtended;
   }
 
-  /**
-   * @return the name
-   */
-  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, 
Level.EXTENDED })
-  public String getName() {
-    return name;
-  }
-
-  /**
-   * @param name
-   *          is the function name
-   */
-  public void setName(String name) {
-    this.name = name;
-  }
-
-  /**
-   * @return the resFile
-   */
   @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
   public String getResFile() {
     return resFile;
   }
 
-  /**
-   * @param resFile
-   *          the resFile to set
-   */
-  public void setResFile(String resFile) {
-    this.resFile = resFile;
+  @Explain(displayName = "name", explainLevels = { Level.USER, Level.DEFAULT, 
Level.EXTENDED })
+  public String getName() {
+    return name;
+  }
+
+  public boolean isExtended() {
+    return isExtended;
   }
 }
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionOperation.java 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionOperation.java
new file mode 100644
index 0000000..2165875
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/DescFunctionOperation.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.function;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
+
+import static org.apache.commons.lang.StringUtils.join;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.Set;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.parse.SemanticException;
+import org.apache.hive.common.util.AnnotationUtils;
+
+/**
+ * Operation process of describing a function.
+ */
+public class DescFunctionOperation extends DDLOperation {
+  private final DescFunctionDesc desc;
+
+  public DescFunctionOperation(DDLOperationContext context, DescFunctionDesc 
desc) {
+    super(context);
+    this.desc = desc;
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    try (DataOutputStream outStream = DDLUtils.getOutputStream(new 
Path(desc.getResFile()), context)) {
+      String funcName = desc.getName();
+      FunctionInfo functionInfo = FunctionRegistry.getFunctionInfo(funcName);
+      Class<?> funcClass = functionInfo == null ? null : 
functionInfo.getFunctionClass();
+      Description description = funcClass == null ? null : 
AnnotationUtils.getAnnotation(funcClass, Description.class);
+
+      printBaseInfo(outStream, funcName, funcClass, description);
+      outStream.write(Utilities.newLineCode);
+      printExtendedInfoIfRequested(outStream, functionInfo, funcClass);
+    } catch (IOException e) {
+      LOG.warn("describe function: ", e);
+      return 1;
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+
+    return 0;
+  }
+
+  private void printBaseInfo(DataOutputStream outStream, String funcName, 
Class<?> funcClass, Description description)
+      throws IOException, SemanticException {
+    if (funcClass == null) {
+      outStream.writeBytes("Function '" + funcName + "' does not exist.");
+    } else if (description == null) {
+      outStream.writeBytes("There is no documentation for function '" + 
funcName + "'");
+    } else {
+      outStream.writeBytes(description.value().replace("_FUNC_", funcName));
+      if (desc.isExtended()) {
+        Set<String> synonyms = FunctionRegistry.getFunctionSynonyms(funcName);
+        if (synonyms.size() > 0) {
+          outStream.writeBytes("\nSynonyms: " + join(synonyms, ", "));
+        }
+        if (description.extended().length() > 0) {
+          outStream.writeBytes("\n" + description.extended().replace("_FUNC_", 
funcName));
+        }
+      }
+    }
+  }
+
+
+  private void printExtendedInfoIfRequested(DataOutputStream outStream, 
FunctionInfo functionInfo, Class<?> funcClass)
+      throws IOException {
+    if (!desc.isExtended()) {
+      return;
+    }
+
+    if (funcClass != null) {
+      outStream.writeBytes("Function class:" + funcClass.getName() + "\n");
+    }
+
+    if (functionInfo != null) {
+      outStream.writeBytes("Function type:" + functionInfo.getFunctionType() + 
"\n");
+      FunctionResource[] resources = functionInfo.getResources();
+      if (resources != null) {
+        for (FunctionResource resource : resources) {
+          outStream.writeBytes("Resource:" + resource.getResourceURI() + "\n");
+        }
+      }
+    }
+  }
+}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java
similarity index 56%
rename from ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java
rename to 
ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java
index 609d174..2affa32 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowFunctionsDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsDesc.java
@@ -16,111 +16,63 @@
  * limitations under the License.
  */
 
-package org.apache.hadoop.hive.ql.plan;
+package org.apache.hadoop.hive.ql.ddl.function;
 
 import java.io.Serializable;
 
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.ddl.DDLDesc;
+import org.apache.hadoop.hive.ql.ddl.DDLTask2;
+import org.apache.hadoop.hive.ql.plan.Explain;
 import org.apache.hadoop.hive.ql.plan.Explain.Level;
 
-
 /**
- * ShowFunctionsDesc.
- *
+ * DDL task description for SHOW FUNCTIONS commands.
  */
 @Explain(displayName = "Show Functions", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
-public class ShowFunctionsDesc extends DDLDesc implements Serializable {
+public class ShowFunctionsDesc implements DDLDesc, Serializable {
   private static final long serialVersionUID = 1L;
-  String pattern;
-  String resFile;
-  /**
-   * whether like keyword is specified
-   */
-  private boolean isLikePattern = false;
-  /**
-   * table name for the result of show tables.
-   */
-  private static final String table = "show";
-  /**
-   * thrift ddl for the result of show tables.
-   */
-  private static final String schema = "tab_name#string";
 
-  public String getTable() {
-    return table;
+  static {
+    DDLTask2.registerOperation(ShowFunctionsDesc.class, 
ShowFunctionsOperation.class);
   }
 
-  public String getSchema() {
-    return schema;
+  /**
+   * Thrift ddl for the result of show tables.
+   */
+  private static final String SCHEMA = "tab_name#string";
+  public static String getSchema() {
+    return SCHEMA;
   }
 
-  public ShowFunctionsDesc() {
-  }
+  private final String resFile;
+  private final String pattern;
+  private final boolean isLikePattern;
 
-  /**
-   * @param resFile
-   */
   public ShowFunctionsDesc(Path resFile) {
-    this.resFile = resFile.toString();
-    pattern = null;
+    this(resFile, null, false);
   }
 
-  /**
-   * @param pattern
-   *          names of tables to show
-   */
   public ShowFunctionsDesc(Path resFile, String pattern) {
-    this.resFile = resFile.toString();
-    this.pattern = pattern;
+    this(resFile, pattern, false);
   }
 
-  /**
-   * @param pattern
-   *          names of tables to show
-   * @param isLikePattern
-   *          is like keyword used
-   */
   public ShowFunctionsDesc(Path resFile, String pattern, boolean 
isLikePattern) {
-    this(resFile, pattern);
-    this.isLikePattern = isLikePattern;
-  }
-
-
-  /**
-   * @return the pattern
-   */
-  @Explain(displayName = "pattern")
-  public String getPattern() {
-    return pattern;
-  }
-
-  /**
-   * @param pattern
-   *          the pattern to set
-   */
-  public void setPattern(String pattern) {
+    this.resFile = resFile.toString();
     this.pattern = pattern;
+    this.isLikePattern = isLikePattern;
   }
 
-  /**
-   * @return the resFile
-   */
   @Explain(displayName = "result file", explainLevels = { Level.EXTENDED })
   public String getResFile() {
     return resFile;
   }
 
-  /**
-   * @param resFile
-   *          the resFile to set
-   */
-  public void setResFile(String resFile) {
-    this.resFile = resFile;
+  @Explain(displayName = "pattern")
+  public String getPattern() {
+    return pattern;
   }
 
-  /**
-   * @return isLikePattern
-   */
   public boolean getIsLikePattern() {
     return isLikePattern;
   }
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java
new file mode 100644
index 0000000..d76312d
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/ShowFunctionsOperation.java
@@ -0,0 +1,90 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.ddl.function;
+
+import org.apache.hadoop.hive.ql.ddl.DDLOperationContext;
+import org.apache.hadoop.hive.ql.ddl.DDLUtils;
+import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
+import org.apache.hadoop.hive.ql.exec.Utilities;
+
+import java.io.DataOutputStream;
+import java.io.IOException;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
+
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.ql.ddl.DDLOperation;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.serde.serdeConstants;
+
+/**
+ * Operation process of showing the functions.
+ */
+public class ShowFunctionsOperation extends DDLOperation {
+  private final ShowFunctionsDesc desc;
+
+  public ShowFunctionsOperation(DDLOperationContext context, ShowFunctionsDesc 
desc) {
+    super(context);
+    this.desc = desc;
+  }
+
+  @Override
+  public int execute() throws HiveException {
+    Set<String> funcs = fetchFunctions();
+    return printFunctions(funcs);
+  }
+
+  private Set<String> fetchFunctions() {
+    Set<String> funcs = null;
+    if (desc.getPattern() != null) {
+      LOG.debug("pattern: {}", desc.getPattern());
+      if (desc.getIsLikePattern()) {
+        funcs = 
FunctionRegistry.getFunctionNamesByLikePattern(desc.getPattern());
+      } else {
+        context.getConsole().printInfo("SHOW FUNCTIONS is deprecated, please 
use SHOW FUNCTIONS LIKE instead.");
+        funcs = FunctionRegistry.getFunctionNames(desc.getPattern());
+      }
+      LOG.info("Found {} function(s) matching the SHOW FUNCTIONS statement.", 
funcs.size());
+    } else {
+      funcs = FunctionRegistry.getFunctionNames();
+    }
+
+    return funcs;
+  }
+
+  private int printFunctions(Set<String> funcs) throws HiveException {
+    try (DataOutputStream outStream = DDLUtils.getOutputStream(new 
Path(desc.getResFile()), context)) {
+      SortedSet<String> sortedFuncs = new TreeSet<String>(funcs);
+      sortedFuncs.removeAll(serdeConstants.PrimitiveTypes);
+
+      for (String func : sortedFuncs) {
+        outStream.writeBytes(func);
+        outStream.write(Utilities.newLineCode);
+      }
+
+      return 0;
+    } catch (IOException e) {
+      LOG.warn("show function: ", e);
+      return 1;
+    } catch (Exception e) {
+      throw new HiveException(e);
+    }
+  }
+}
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/package-info.java 
b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/package-info.java
new file mode 100644
index 0000000..c16607d
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ddl/function/package-info.java
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/** Function related DDL operation descriptions and operations. */
+package org.apache.hadoop.hive.ql.ddl.function;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java 
b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index ed797fc..269cd85 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.hive.ql.exec;
 
-import static org.apache.commons.lang.StringUtils.join;
-
 import java.io.DataOutputStream;
 import java.io.FileNotFoundException;
 import java.io.IOException;
@@ -27,7 +25,6 @@ import java.io.OutputStreamWriter;
 import java.io.Serializable;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.sql.SQLException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Comparator;
@@ -95,7 +92,6 @@ import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.QueryPlan;
 import org.apache.hadoop.hive.ql.QueryState;
 import org.apache.hadoop.hive.ql.exec.ArchiveUtils.PartSpecInfo;
-import org.apache.hadoop.hive.ql.exec.FunctionInfo.FunctionResource;
 import org.apache.hadoop.hive.ql.exec.tez.TezSessionPoolManager;
 import org.apache.hadoop.hive.ql.exec.tez.TezTask;
 import org.apache.hadoop.hive.ql.exec.tez.WorkloadManager;
@@ -146,7 +142,6 @@ import 
org.apache.hadoop.hive.ql.plan.CreateOrDropTriggerToPoolMappingDesc;
 import org.apache.hadoop.hive.ql.plan.CreateResourcePlanDesc;
 import org.apache.hadoop.hive.ql.plan.CreateWMTriggerDesc;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
-import org.apache.hadoop.hive.ql.plan.DescFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.DropPartitionDesc;
 import org.apache.hadoop.hive.ql.plan.DropResourcePlanDesc;
 import org.apache.hadoop.hive.ql.plan.DropWMMappingDesc;
@@ -174,7 +169,6 @@ import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
 import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowCompactionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowConfDesc;
-import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowGrantDesc;
 import org.apache.hadoop.hive.ql.plan.ShowLocksDesc;
 import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
@@ -195,7 +189,6 @@ import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObje
 import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveRoleGrant;
 import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveV1Authorizer;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.Deserializer;
 import org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe;
 import org.apache.hadoop.hive.serde2.avro.AvroSerdeUtils;
@@ -213,7 +206,6 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.tools.HadoopArchives;
 import org.apache.hadoop.util.ToolRunner;
-import org.apache.hive.common.util.AnnotationUtils;
 import org.apache.hive.common.util.ReflectionUtil;
 import org.apache.thrift.TException;
 import org.slf4j.Logger;
@@ -326,21 +318,11 @@ public class DDLTask extends Task<DDLWork> implements 
Serializable {
         return msck(db, msckDesc);
       }
 
-      DescFunctionDesc descFunc = work.getDescFunctionDesc();
-      if (descFunc != null) {
-        return describeFunction(db, descFunc);
-      }
-
       ShowColumnsDesc showCols = work.getShowColumnsDesc();
       if (showCols != null) {
         return showColumns(db, showCols);
       }
 
-      ShowFunctionsDesc showFuncs = work.getShowFuncsDesc();
-      if (showFuncs != null) {
-        return showFunctions(db, showFuncs);
-      }
-
       ShowLocksDesc showLocks = work.getShowLocksDesc();
       if (showLocks != null) {
         return showLocks(db, showLocks);
@@ -1971,59 +1953,6 @@ public class DDLTask extends Task<DDLWork> implements 
Serializable {
   }
 
   /**
-   * Write a list of the user defined functions to a file.
-   * @param db
-   *
-   * @param showFuncs
-   *          are the functions we're interested in.
-   * @return Returns 0 when execution succeeds and above 0 if it fails.
-   * @throws HiveException
-   *           Throws this exception if an unexpected error occurs.
-   */
-  private int showFunctions(Hive db, ShowFunctionsDesc showFuncs) throws 
HiveException {
-    // get the tables for the desired patten - populate the output stream
-    Set<String> funcs = null;
-    if (showFuncs.getPattern() != null) {
-      LOG.debug("pattern: {}", showFuncs.getPattern());
-      if (showFuncs.getIsLikePattern()) {
-         funcs = 
FunctionRegistry.getFunctionNamesByLikePattern(showFuncs.getPattern());
-      } else {
-         console.printInfo("SHOW FUNCTIONS is deprecated, please use SHOW 
FUNCTIONS LIKE instead.");
-         funcs = FunctionRegistry.getFunctionNames(showFuncs.getPattern());
-      }
-      LOG.info("Found {} function(s) matching the SHOW FUNCTIONS statement.", 
funcs.size());
-    } else {
-      funcs = FunctionRegistry.getFunctionNames();
-    }
-
-    // write the results in the file
-    DataOutputStream outStream = getOutputStream(showFuncs.getResFile());
-    try {
-      SortedSet<String> sortedFuncs = new TreeSet<String>(funcs);
-      // To remove the primitive types
-      sortedFuncs.removeAll(serdeConstants.PrimitiveTypes);
-      Iterator<String> iterFuncs = sortedFuncs.iterator();
-
-      while (iterFuncs.hasNext()) {
-        // create a row per table name
-        outStream.writeBytes(iterFuncs.next());
-        outStream.write(terminator);
-      }
-    } catch (FileNotFoundException e) {
-      LOG.warn("show function: ", e);
-      return 1;
-    } catch (IOException e) {
-      LOG.warn("show function: ", e);
-      return 1;
-    } catch (Exception e) {
-      throw new HiveException(e);
-    } finally {
-      IOUtils.closeStream(outStream);
-    }
-    return 0;
-  }
-
-  /**
    * Write a list of the current locks to a file.
    * @param db
    *
@@ -2369,80 +2298,6 @@ public class DDLTask extends Task<DDLWork> implements 
Serializable {
     return 0;
   }
 
-  /**
-   * Shows a description of a function.
-   * @param db
-   *
-   * @param descFunc
-   *          is the function we are describing
-   * @throws HiveException
-   */
-  private int describeFunction(Hive db, DescFunctionDesc descFunc) throws 
HiveException, SQLException {
-    String funcName = descFunc.getName();
-
-    // write the results in the file
-    DataOutputStream outStream = getOutputStream(descFunc.getResFile());
-    try {
-      // get the function documentation
-      Description desc = null;
-      Class<?> funcClass = null;
-      FunctionInfo functionInfo = FunctionRegistry.getFunctionInfo(funcName);
-      if (functionInfo != null) {
-        funcClass = functionInfo.getFunctionClass();
-      }
-      if (funcClass != null) {
-        desc = AnnotationUtils.getAnnotation(funcClass, Description.class);
-      }
-      if (desc != null) {
-        outStream.writeBytes(desc.value().replace("_FUNC_", funcName));
-        if (descFunc.isExtended()) {
-          Set<String> synonyms = 
FunctionRegistry.getFunctionSynonyms(funcName);
-          if (synonyms.size() > 0) {
-            outStream.writeBytes("\nSynonyms: " + join(synonyms, ", "));
-          }
-          if (desc.extended().length() > 0) {
-            outStream.writeBytes("\n"
-                + desc.extended().replace("_FUNC_", funcName));
-          }
-        }
-      } else {
-        if (funcClass != null) {
-          outStream.writeBytes("There is no documentation for function '"
-              + funcName + "'");
-        } else {
-          outStream.writeBytes("Function '" + funcName + "' does not exist.");
-        }
-      }
-
-      outStream.write(terminator);
-      if (descFunc.isExtended()) {
-        if (funcClass != null) {
-          outStream.writeBytes("Function class:" + funcClass.getName() + "\n");
-        }
-        if (functionInfo != null) {
-          outStream.writeBytes("Function type:" + 
functionInfo.getFunctionType() + "\n");
-          FunctionResource[] resources = functionInfo.getResources();
-          if (resources != null) {
-            for (FunctionResource resource : resources) {
-              outStream.writeBytes("Resource:" + resource.getResourceURI() + 
"\n");
-            }
-          }
-        }
-      }
-    } catch (FileNotFoundException e) {
-      LOG.warn("describe function: ", e);
-      return 1;
-    } catch (IOException e) {
-      LOG.warn("describe function: ", e);
-      return 1;
-    } catch (Exception e) {
-      throw new HiveException(e);
-    } finally {
-      IOUtils.closeStream(outStream);
-    }
-    return 0;
-  }
-
   private void writeToFile(String data, String file) throws IOException {
     Path resFile = new Path(file);
     FileSystem fs = resFile.getFileSystem(conf);
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java 
b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 0828fad..d187d19 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -81,6 +81,8 @@ import 
org.apache.hadoop.hive.ql.ddl.database.ShowCreateDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.database.ShowDatabasesDesc;
 import org.apache.hadoop.hive.ql.ddl.database.SwitchDatabaseDesc;
 import org.apache.hadoop.hive.ql.ddl.database.UnlockDatabaseDesc;
+import org.apache.hadoop.hive.ql.ddl.function.DescFunctionDesc;
+import org.apache.hadoop.hive.ql.ddl.function.ShowFunctionsDesc;
 import org.apache.hadoop.hive.ql.ddl.table.DescTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.DropTableDesc;
 import org.apache.hadoop.hive.ql.ddl.table.LockTableDesc;
@@ -139,7 +141,6 @@ import org.apache.hadoop.hive.ql.plan.CreateWMTriggerDesc;
 import org.apache.hadoop.hive.ql.plan.DDLDesc;
 import org.apache.hadoop.hive.ql.plan.DDLDesc.DDLDescWithWriteId;
 import org.apache.hadoop.hive.ql.plan.DDLWork;
-import org.apache.hadoop.hive.ql.plan.DescFunctionDesc;
 import org.apache.hadoop.hive.ql.plan.DropPartitionDesc;
 import org.apache.hadoop.hive.ql.plan.DropResourcePlanDesc;
 import org.apache.hadoop.hive.ql.plan.DropWMMappingDesc;
@@ -162,7 +163,6 @@ import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
 import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowCompactionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowConfDesc;
-import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowGrantDesc;
 import org.apache.hadoop.hive.ql.plan.ShowLocksDesc;
 import org.apache.hadoop.hive.ql.plan.ShowPartitionsDesc;
@@ -2800,9 +2800,8 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     } else {
       showFuncsDesc = new ShowFunctionsDesc(ctx.getResFile());
     }
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-        showFuncsDesc)));
-    setFetchTask(createFetchTask(showFuncsDesc.getSchema()));
+    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), showFuncsDesc)));
+    setFetchTask(createFetchTask(ShowFunctionsDesc.getSchema()));
   }
 
   /**
@@ -3153,11 +3152,9 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
       throw new SemanticException("Unexpected Tokens at DESCRIBE FUNCTION");
     }
 
-    DescFunctionDesc descFuncDesc = new DescFunctionDesc(ctx.getResFile(),
-        funcName, isExtended);
-    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
-        descFuncDesc)));
-    setFetchTask(createFetchTask(descFuncDesc.getSchema()));
+    DescFunctionDesc descFuncDesc = new DescFunctionDesc(ctx.getResFile(), funcName, isExtended);
+    rootTasks.add(TaskFactory.get(new DDLWork2(getInputs(), getOutputs(), descFuncDesc)));
+    setFetchTask(createFetchTask(DescFunctionDesc.getSchema()));
   }
 
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
index 95028cc..c3863e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
@@ -37,12 +37,10 @@ public class DDLWork implements Serializable {
   private DropPartitionDesc dropPartitionDesc;
   private AlterTableDesc alterTblDesc;
   private ShowColumnsDesc showColumnsDesc;
-  private ShowFunctionsDesc showFuncsDesc;
   private ShowLocksDesc showLocksDesc;
   private ShowCompactionsDesc showCompactionsDesc;
   private ShowTxnsDesc showTxnsDesc;
   private AbortTxnsDesc abortTxnsDesc;
-  private DescFunctionDesc descFunctionDesc;
   private ShowPartitionsDesc showPartsDesc;
   private AddPartitionDesc addPartitionDesc;
   private RenamePartitionDesc renamePartitionDesc;
@@ -134,16 +132,6 @@ public class DDLWork implements Serializable {
   }
 
   /**
-   * @param showFuncsDesc
-   */
-  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
-      ShowFunctionsDesc showFuncsDesc) {
-    this(inputs, outputs);
-
-    this.showFuncsDesc = showFuncsDesc;
-  }
-
-  /**
    * @param showLocksDesc
    */
   public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
@@ -171,16 +159,6 @@ public class DDLWork implements Serializable {
     this.abortTxnsDesc = abortTxnsDesc;
   }
 
-   /**
-   * @param descFuncDesc
-   */
-  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
-      DescFunctionDesc descFuncDesc) {
-    this(inputs, outputs);
-
-    descFunctionDesc = descFuncDesc;
-  }
-
   /**
    * @param showPartsDesc
    */
@@ -402,14 +380,6 @@ public class DDLWork implements Serializable {
   }
 
   /**
-   * @return the showFuncsDesc
-   */
-  @Explain(displayName = "Show Function Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public ShowFunctionsDesc getShowFuncsDesc() {
-    return showFuncsDesc;
-  }
-
-  /**
    * @return the showLocksDesc
    */
  @Explain(displayName = "Show Lock Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
@@ -432,14 +402,6 @@ public class DDLWork implements Serializable {
     return abortTxnsDesc;
   }
 
-  /**
-   * @return the descFuncDesc
-   */
-  @Explain(displayName = "Show Function Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
-  public DescFunctionDesc getDescFunctionDesc() {
-    return descFunctionDesc;
-  }
-
  @Explain(displayName = "Kill Query Operator", explainLevels = { Level.USER, Level.DEFAULT, Level.EXTENDED })
   public KillQueryDesc getKillQueryDesc() {
     return killQueryDesc;
diff --git a/ql/src/test/queries/clientpositive/desc_function.q b/ql/src/test/queries/clientpositive/desc_function.q
new file mode 100644
index 0000000..d055d9c
--- /dev/null
+++ b/ql/src/test/queries/clientpositive/desc_function.q
@@ -0,0 +1,5 @@
+EXPLAIN DESC FUNCTION replace;
+DESC FUNCTION replace;
+
+EXPLAIN DESC FUNCTION EXTENDED replace;
+DESC FUNCTION EXTENDED replace;
diff --git a/ql/src/test/queries/clientpositive/show_functions.q b/ql/src/test/queries/clientpositive/show_functions.q
index 106e05f..6ef0078 100644
--- a/ql/src/test/queries/clientpositive/show_functions.q
+++ b/ql/src/test/queries/clientpositive/show_functions.q
@@ -1,5 +1,7 @@
+EXPLAIN SHOW FUNCTIONS;
 SHOW FUNCTIONS;
 
+EXPLAIN SHOW FUNCTIONS '^c.*';
 SHOW FUNCTIONS '^c.*';
 
 SHOW FUNCTIONS '.*e$';
@@ -10,6 +12,7 @@ SHOW FUNCTIONS '.*date.*';
 
 SHOW FUNCTIONS '***';
 
+EXPLAIN SHOW FUNCTIONS LIKE 'When';
 SHOW FUNCTIONS LIKE 'When';
 
 SHOW FUNCTIONS LIKE 'max|min';
diff --git a/ql/src/test/results/clientpositive/desc_function.q.out b/ql/src/test/results/clientpositive/desc_function.q.out
new file mode 100644
index 0000000..1f804bb
--- /dev/null
+++ b/ql/src/test/results/clientpositive/desc_function.q.out
@@ -0,0 +1,53 @@
+PREHOOK: query: EXPLAIN DESC FUNCTION replace
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: EXPLAIN DESC FUNCTION replace
+POSTHOOK: type: DESCFUNCTION
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-0
+    Describe Function
+      name: replace
+
+  Stage: Stage-1
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: DESC FUNCTION replace
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESC FUNCTION replace
+POSTHOOK: type: DESCFUNCTION
+replace(str, search, rep) - replace all substrings of 'str' that match 'search' with 'rep'
+PREHOOK: query: EXPLAIN DESC FUNCTION EXTENDED replace
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: EXPLAIN DESC FUNCTION EXTENDED replace
+POSTHOOK: type: DESCFUNCTION
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-0
+    Describe Function
+      name: replace
+
+  Stage: Stage-1
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
+PREHOOK: query: DESC FUNCTION EXTENDED replace
+PREHOOK: type: DESCFUNCTION
+POSTHOOK: query: DESC FUNCTION EXTENDED replace
+POSTHOOK: type: DESCFUNCTION
+replace(str, search, rep) - replace all substrings of 'str' that match 'search' with 'rep'
+Example:
+  > SELECT replace('Hack and Hue', 'H', 'BL') FROM src LIMIT 1;
+  'BLack and BLue'
+Function class:org.apache.hadoop.hive.ql.udf.UDFReplace
+Function type:BUILTIN
diff --git a/ql/src/test/results/clientpositive/show_functions.q.out b/ql/src/test/results/clientpositive/show_functions.q.out
index 4e44753..374e9c4 100644
--- a/ql/src/test/results/clientpositive/show_functions.q.out
+++ b/ql/src/test/results/clientpositive/show_functions.q.out
@@ -1,3 +1,21 @@
+PREHOOK: query: EXPLAIN SHOW FUNCTIONS
+PREHOOK: type: SHOWFUNCTIONS
+POSTHOOK: query: EXPLAIN SHOW FUNCTIONS
+POSTHOOK: type: SHOWFUNCTIONS
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-0
+    Show Functions
+
+  Stage: Stage-1
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 PREHOOK: query: SHOW FUNCTIONS
 PREHOOK: type: SHOWFUNCTIONS
 POSTHOOK: query: SHOW FUNCTIONS
@@ -306,6 +324,25 @@ xpath_string
 year
 |
 ~
+PREHOOK: query: EXPLAIN SHOW FUNCTIONS '^c.*'
+PREHOOK: type: SHOWFUNCTIONS
+POSTHOOK: query: EXPLAIN SHOW FUNCTIONS '^c.*'
+POSTHOOK: type: SHOWFUNCTIONS
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-0
+    Show Functions
+      pattern: ^c.*
+
+  Stage: Stage-1
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 PREHOOK: query: SHOW FUNCTIONS '^c.*'
 PREHOOK: type: SHOWFUNCTIONS
 POSTHOOK: query: SHOW FUNCTIONS '^c.*'
@@ -414,6 +451,25 @@ PREHOOK: query: SHOW FUNCTIONS '***'
 PREHOOK: type: SHOWFUNCTIONS
 POSTHOOK: query: SHOW FUNCTIONS '***'
 POSTHOOK: type: SHOWFUNCTIONS
+PREHOOK: query: EXPLAIN SHOW FUNCTIONS LIKE 'When'
+PREHOOK: type: SHOWFUNCTIONS
+POSTHOOK: query: EXPLAIN SHOW FUNCTIONS LIKE 'When'
+POSTHOOK: type: SHOWFUNCTIONS
+STAGE DEPENDENCIES:
+  Stage-0 is a root stage
+  Stage-1 depends on stages: Stage-0
+
+STAGE PLANS:
+  Stage: Stage-0
+    Show Functions
+      pattern: When
+
+  Stage: Stage-1
+    Fetch Operator
+      limit: -1
+      Processor Tree:
+        ListSink
+
 PREHOOK: query: SHOW FUNCTIONS LIKE 'When'
 PREHOOK: type: SHOWFUNCTIONS
 POSTHOOK: query: SHOW FUNCTIONS LIKE 'When'

Reply via email to