This is an automated email from the ASF dual-hosted git repository.

sankarh pushed a commit to branch branch-3
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/branch-3 by this push:
     new 5f0df2d9b25 HIVE-27785: Backport of HIVE-20467, HIVE-20508, HIVE-20550 to branch-3 (#4790)
5f0df2d9b25 is described below

commit 5f0df2d9b253e63f5105e75fb39398025efd84dd
Author: Aman Raj <104416558+amanraj2...@users.noreply.github.com>
AuthorDate: Fri Oct 20 12:57:48 2023 +0530

    HIVE-27785: Backport of HIVE-20467, HIVE-20508, HIVE-20550 to branch-3 (#4790)

    * HIVE-20467: Allow IF NOT EXISTS/IF EXISTS in Resource plan creation/drop
    * HIVE-20508: Hive does not support user names of type "user@realm" (Deepak Jaiswal, reviewed by Thejas Nair)
    * HIVE-20550: Switch WebHCat to use beeline to submit Hive queries (Daniel Dai, reviewed by Thejas Nair)
    
    ---------
    Co-authored-by: Miklos Gergely <mgerg...@hortonworks.com>
    Co-authored-by: Deepak Jaiswal <djais...@apache.org>
    Co-authored-by: Daniel Dai <dai...@gmail.com>
    
    Signed-off-by: Sankar Hariappan <sank...@apache.org>
    Closes (#4790)
---
 .../java/org/apache/hadoop/hive/conf/HiveConf.java |  2 +
 .../test/e2e/templeton/drivers/TestDriverCurl.pm   |  6 +--
 .../test/e2e/templeton/tests/jobsubmission.conf    |  6 +--
 .../hive/hcatalog/templeton/DeleteDelegator.java   | 59 +++++++++++++++++++---
 .../hive/hcatalog/templeton/HiveDelegator.java     | 25 +++------
 .../hive/hcatalog/templeton/JsonBuilder.java       |  2 +-
 .../hive/hcatalog/templeton/tool/JobState.java     | 13 +++++
 .../templeton/tool/JobSubmissionConstants.java     |  3 ++
 .../hive/hcatalog/templeton/tool/LaunchMapper.java | 23 ++++++---
 .../hcatalog/templeton/tool/TempletonUtils.java    |  6 +++
 .../templeton/tool/TestTempletonUtils.java         |  3 ++
 .../java/org/apache/hadoop/hive/ql/ErrorMsg.java   |  5 +-
 .../org/apache/hadoop/hive/ql/exec/DDLTask.java    |  6 +--
 .../org/apache/hadoop/hive/ql/metadata/Hive.java   | 12 ++++-
 .../hadoop/hive/ql/parse/DDLSemanticAnalyzer.java  | 18 ++++++-
 .../hadoop/hive/ql/parse/ResourcePlanParser.g      |  8 +--
 .../hive/ql/plan/CreateResourcePlanDesc.java       | 11 +++-
 .../hadoop/hive/ql/plan/DropResourcePlanDesc.java  | 14 ++++-
 ql/src/test/queries/clientpositive/resourceplan.q  | 10 ++++
 .../results/clientpositive/llap/resourceplan.q.out | 18 +++++++
 .../hive/service/cli/thrift/ThriftCLIService.java  |  6 ++-
 21 files changed, 202 insertions(+), 54 deletions(-)

diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 6bd226c442f..deed2a66d64 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -2814,6 +2814,8 @@ public class HiveConf extends Configuration {
        "hive.test.authz.sstd.hs2.mode", false, "test hs2 mode from .q tests", true),
     HIVE_AUTHORIZATION_ENABLED("hive.security.authorization.enabled", false,
         "enable or disable the Hive client authorization"),
+    HIVE_AUTHORIZATION_KERBEROS_USE_SHORTNAME("hive.security.authorization.kerberos.use.shortname", true,
+        "use short name in Kerberos cluster"),
     HIVE_AUTHORIZATION_MANAGER("hive.security.authorization.manager",
        "org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory",
        "The Hive client authorization manager class name. The user defined authorization class should implement \n" +
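[Editor's note: the new flag defaults to true, preserving the old behavior of shortening "user@realm" principals. A minimal sketch of turning it off, assuming the property is read from hive-site.xml when HiveServer2 starts; it is applied per connection on the server side, so a session-level SET may not take effect and is shown only for illustration:

    -- Hypothetical override; in practice this belongs in hive-site.xml
    -- before HiveServer2 starts.
    SET hive.security.authorization.kerberos.use.shortname=false;
]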
diff --git a/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm b/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
index 66a6ca14438..e62269b27f0 100644
--- a/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
+++ b/hcatalog/src/test/e2e/templeton/drivers/TestDriverCurl.pm
@@ -555,12 +555,12 @@ sub execCurlCmd(){
     my %result;
     my $out;
     my $err;
-    IPC::Run::run(\@curl_cmd, \undef, $out, $err)
+    IPC::Run::run(\@curl_cmd, \undef, $log, $log)
         or die "Failed running curl cmd " . join ' ', @curl_cmd;
 
     $result{'rc'} = $? >> 8;
-    $result{'stderr'} = $err;
-    $result{'stdout'} = $out;
+    $result{'stderr'} = $log;
+    $result{'stdout'} = $log;
     $result{'body'} = `cat $res_body`;
 
     my @full_header = `cat $res_header`;
diff --git a/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf b/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf
index a1b02844216..824eb922a94 100644
--- a/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf
+++ b/hcatalog/src/test/e2e/templeton/tests/jobsubmission.conf
@@ -324,7 +324,7 @@ $cfg =
                                 #results
      'status_code' => 200,
      'check_job_created' => 1,
-     'check_job_exit_value' => 64,
+     'check_job_exit_value' => 1,
 
     },
  
@@ -443,7 +443,7 @@ $cfg =
      'num' => 9,
      'method' => 'POST',
      'url' => ':TEMPLETON_URL:/templeton/v1/hive?user.name=:UNAME:',
-     'post_options' => ['execute=add jar piggybank.jar', 'files=:INPDIR_HDFS:/piggybank.jar',],
+     'post_options' => ['execute=add jar :INPDIR_HDFS:/piggybank.jar',],
      'json_field_substr_match' => { 'id' => '\d+'},
                                 #results
      'status_code' => 200,
@@ -499,7 +499,7 @@ $cfg =
     {
                                 #enable logs
      'num' => 13,
-     'ignore23' => 'Log collector does not work with Hadoop 2',
+     'ignore' => 'Log collector does not work with Hadoop 2/3',
      'method' => 'POST',
      'url' => ':TEMPLETON_URL:/templeton/v1/hive?user.name=:UNAME:',
     'post_options' => ['execute=select a,b from mynums', 'statusdir=:OUTDIR:/status', 'enablelog=true'],
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java
index 049c9a48aca..5afd1b9eb7b 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/DeleteDelegator.java
@@ -19,8 +19,16 @@
 package org.apache.hive.hcatalog.templeton;
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.regex.Pattern;
 
+import org.apache.commons.io.IOUtils;
+import org.apache.hive.hcatalog.templeton.tool.TempletonUtils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.shims.HadoopShims.WebHCatJTShim;
@@ -39,6 +47,36 @@ public class DeleteDelegator extends TempletonDelegator {
     super(appConf);
   }
 
+  private String runProgram(String[] cmd) throws IOException, InterruptedException {
+    ProcessBuilder pb = new ProcessBuilder(cmd);
+    Set<String> keys = new HashSet<String>(pb.environment().keySet());
+    for (String key : keys) {
+      pb.environment().remove(key);
+    }
+    Process p = pb.start();
+    String stdout = IOUtils.toString(p.getInputStream());
+    String stderr = IOUtils.toString(p.getErrorStream());
+    int code = p.waitFor();
+    if (code != 0) {
+      throw new IOException("non-zero exit code " + code + " when running " + Arrays.toString(cmd) + "\n"
+              + "stdout: " + stdout + "\n" + "stderr: " + stderr + "\n");
+    }
+    return stdout;
+  }
+
+  private void killHiveQuery(String user, String tag) throws IOException, InterruptedException {
+    String[] cmd = new String[] {appConf.hivePath(), "--getUrlsFromBeelineSite"};
+    String urlsString = runProgram(cmd);
+    String[] urls = urlsString.substring(6).split(",");
+    for (String url : urls) {
+      if (url != null && !url.trim().isEmpty()) {
+        cmd = new String[]{appConf.hivePath(), "-u", "jdbc:hive2://" + url, "-n", user,
+                "-e", "kill query '" + tag + "'"};
+        runProgram(cmd);
+      }
+    }
+  }
+
   public QueueStatusBean run(String user, String id)
     throws NotAuthorizedException, BadParam, IOException, InterruptedException
   {
@@ -53,13 +91,20 @@ public class DeleteDelegator extends TempletonDelegator {
         throw new BadParam("Invalid jobid: " + id);
       tracker.killJob(jobid);
       state = new JobState(id, Main.getAppConfigInstance());
-      List<JobState> children = state.getChildren();
-      if (children != null) {
-        for (JobState child : children) {
-          try {
-            tracker.killJob(StatusDelegator.StringToJobID(child.getId()));
-          } catch (IOException e) {
-            LOG.warn("templeton: fail to kill job " + child.getId());
+      if (state.getJobType() != null) {
+        LauncherDelegator.JobType jobType = LauncherDelegator.JobType.valueOf(state.getJobType());
+        if (jobType == LauncherDelegator.JobType.HIVE) {
+          killHiveQuery(user, jobid.toString());
+        } else {
+          List<JobState> children = state.getChildren();
+          if (children != null) {
+            for (JobState child : children) {
+              try {
+                tracker.killJob(StatusDelegator.StringToJobID(child.getId()));
+              } catch (IOException e) {
+                LOG.warn("templeton: fail to kill job " + child.getId());
+              }
+            }
           }
         }
       }
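[Editor's note: with this change, a WebHCat DELETE on a Hive job no longer kills child MR jobs; killHiveQuery() above asks each HiveServer2 listed in beeline-site.xml to kill the tagged query. Roughly the SQL it issues per URL, with a made-up job id standing in for the tag:

    -- Sketch only; the tag is the WebHCat job id (job_1697000000000_0001 is
    -- a hypothetical example), sent via: hive -u jdbc:hive2://<url> -n <user> -e "..."
    KILL QUERY 'job_1697000000000_0001';
]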
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
index a3f57dff3e1..3f1968d7f15 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/HiveDelegator.java
@@ -52,13 +52,13 @@ public class HiveDelegator extends LauncherDelegator {
    ExecuteException, IOException, InterruptedException, TooManyRequestsException
   {
     runAs = user;
-    List<String> args = makeArgs(execute, srcFile, defines, hiveArgs, otherFiles, statusdir,
+    List<String> args = makeArgs(user, execute, srcFile, defines, hiveArgs, otherFiles, statusdir,
                    completedUrl, enablelog, enableJobReconnect);
 
     return enqueueController(user, userArgs, callback, args);
   }
 
-  private List<String> makeArgs(String execute, String srcFile,
+  private List<String> makeArgs(String user, String execute, String srcFile,
              List<String> defines, List<String> hiveArgs, String otherFiles,
              String statusdir, String completedUrl, boolean enablelog,
              Boolean enableJobReconnect)
@@ -73,26 +73,15 @@ public class HiveDelegator extends LauncherDelegator {
       
       args.add(appConf.hivePath());
 
-      args.add("--service");
-      args.add("cli");
-
-      //the token file location as initial hiveconf arg
-      args.add("--hiveconf");
-      args.add(TempletonControllerJob.TOKEN_FILE_ARG_PLACEHOLDER);
-
-      //this is needed specifcally for Hive on Tez (in addition to
-      //JobSubmissionConstants.TOKEN_FILE_ARG_PLACEHOLDER)
-      args.add("--hiveconf");
-      args.add(JobSubmissionConstants.TOKEN_FILE_ARG_PLACEHOLDER_TEZ);
+      args.add("-n");
+      args.add(user);
+      args.add("-p");
+      args.add("default");
 
       //add mapreduce job tag placeholder
       args.add("--hiveconf");
-      args.add(TempletonControllerJob.MAPREDUCE_JOB_TAGS_ARG_PLACEHOLDER);
+      args.add(TempletonControllerJob.HIVE_QUERY_TAG_ARG_PLACEHOLDER);
 
-      for (String prop : appConf.hiveProps()) {
-        args.add("--hiveconf");
-        args.add(prop);
-      }
       for (String prop : defines) {
         args.add("--hiveconf");
         args.add(prop);
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JsonBuilder.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JsonBuilder.java
index 6a38719525f..dfcdaef3e93 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JsonBuilder.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/JsonBuilder.java
@@ -49,7 +49,7 @@ public class JsonBuilder {
    hiveError2HttpStatusCode.put(ErrorMsg.INVALID_PARTITION.getErrorCode(), HttpStatus.NOT_FOUND_404);

    hiveError2HttpStatusCode.put(ErrorMsg.DUPLICATE_COLUMN_NAMES.getErrorCode(), HttpStatus.CONFLICT_409);
-    hiveError2HttpStatusCode.put(ErrorMsg.DATABSAE_ALREADY_EXISTS.getErrorCode(), HttpStatus.CONFLICT_409);
+    hiveError2HttpStatusCode.put(ErrorMsg.DATABASE_ALREADY_EXISTS.getErrorCode(), HttpStatus.CONFLICT_409);
    hiveError2HttpStatusCode.put(ErrorMsg.PARTITION_EXISTS.getErrorCode(), HttpStatus.CONFLICT_409);
    hiveError2HttpStatusCode.put(ErrorMsg.TABLE_ALREADY_EXISTS.getErrorCode(), HttpStatus.CONFLICT_409);
   }
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java
index 74cf1e5e838..52738b760f6 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobState.java
@@ -126,6 +126,19 @@ public class JobState {
     setField("percentComplete", percent);
   }
 
+  /**
+   * The job type
+   */
+  public String getJobType()
+          throws IOException {
+    return getField("jobType");
+  }
+
+  public void setJobType(String jobType)
+          throws IOException {
+    setField("jobType", jobType);
+  }
+
   /**
    * Add a jobid to the list of children of this job.
    *
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
index f3a79f63052..9e90b8de502 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/JobSubmissionConstants.java
@@ -54,8 +54,11 @@ public interface JobSubmissionConstants {
  // previously running child jobs can be killed before the child job is launched
   // again.
   public static final String MAPREDUCE_JOB_TAGS = "mapreduce.job.tags";
+  public static final String HIVE_QUERY_TAG = "hive.query.tag";
   public static final String MAPREDUCE_JOB_TAGS_ARG_PLACEHOLDER =
     "__MR_JOB_TAGS_OPTION=MR_JOB_TAGS_JOBID__";
+  public static final String HIVE_QUERY_TAG_ARG_PLACEHOLDER =
+          "__HIVE_QUERY_TAG_OPTION=HIVE_QUERY_TAG_JOBID__";
 
   public static final String HADOOP_CLASSPATH = "HADOOP_CLASSPATH";
   /**
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
index 5de04e35904..b1f4a6ac5e7 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/LaunchMapper.java
@@ -153,7 +153,8 @@ public class LaunchMapper extends Mapper<NullWritable, NullWritable, Text, Text>
       env.put(pathVarName, paths);
     }
   }
-  protected Process startJob(Configuration conf, String jobId, String user, String overrideClasspath)
+  protected Process startJob(Configuration conf, String jobId, String user, String overrideClasspath,
+                             LauncherDelegator.JobType jobType)
     throws IOException, InterruptedException {
 
     copyLocal(COPY_NAME, conf);
@@ -172,8 +173,14 @@ public class LaunchMapper extends Mapper<NullWritable, NullWritable, Text, Text>
     List<String> jarArgsList = new LinkedList<String>(Arrays.asList(jarArgs));
    handleTokenFile(jarArgsList, JobSubmissionConstants.TOKEN_FILE_ARG_PLACEHOLDER, "mapreduce.job.credentials.binary");
    handleTokenFile(jarArgsList, JobSubmissionConstants.TOKEN_FILE_ARG_PLACEHOLDER_TEZ, "tez.credentials.path");
-    handleMapReduceJobTag(jarArgsList, JobSubmissionConstants.MAPREDUCE_JOB_TAGS_ARG_PLACEHOLDER,
-        JobSubmissionConstants.MAPREDUCE_JOB_TAGS, jobId);
+    if (jobType == LauncherDelegator.JobType.HIVE) {
+      replaceJobTag(jarArgsList, JobSubmissionConstants.HIVE_QUERY_TAG_ARG_PLACEHOLDER,
+              JobSubmissionConstants.HIVE_QUERY_TAG, jobId);
+    } else {
+      replaceJobTag(jarArgsList, JobSubmissionConstants.MAPREDUCE_JOB_TAGS_ARG_PLACEHOLDER,
+              JobSubmissionConstants.MAPREDUCE_JOB_TAGS, jobId);
+    }
+
     return TrivialExecService.getInstance().run(jarArgsList, removeEnv, env);
   }
 
@@ -245,11 +252,11 @@ public class LaunchMapper extends Mapper<NullWritable, NullWritable, Text, Text>
   }
 
   /**
-   * Replace the placeholder mapreduce tags with our MR jobid so that all child jobs
+   * Replace the placeholder tags with our MR jobid so that all child jobs or hive queries
    * get tagged with it. This is used on launcher task restart to prevent from having
    * same jobs running in parallel.
    */
-  private static void handleMapReduceJobTag(List<String> jarArgsList, String placeholder,
+  private static void replaceJobTag(List<String> jarArgsList, String placeholder,
       String mapReduceJobTagsProp, String currentJobId) throws IOException {
     String arg = String.format("%s=%s", mapReduceJobTagsProp, currentJobId);
     for(int i = 0; i < jarArgsList.size(); i++) {
@@ -401,8 +408,12 @@ public class LaunchMapper extends Mapper<NullWritable, NullWritable, Text, Text>
     Process proc = startJob(conf,
             context.getJobID().toString(),
             conf.get("user.name"),
-            conf.get(OVERRIDE_CLASSPATH));
+            conf.get(OVERRIDE_CLASSPATH),
+            jobType);
 
+    JobState state = new JobState(context.getJobID().toString(), conf);
+    state.setJobType(jobType.toString());
+    state.close();
     ExecutorService pool = Executors.newCachedThreadPool();
     executeWatcher(pool, conf, context.getJobID(),
             proc.getInputStream(), statusdir, STDOUT_FNAME);
diff --git a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java
index df8c32efb69..29499a289e8 100644
--- a/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java
+++ b/hcatalog/webhcat/svr/src/main/java/org/apache/hive/hcatalog/templeton/tool/TempletonUtils.java
@@ -115,6 +115,7 @@ public class TempletonUtils {
    * groups that don't add information such as "Map 1: -/-"
    */
  public static final Pattern HIVE_TEZ_COMPLETE = Pattern.compile("(Map|Reducer) (\\d+:) (\\d+(\\(\\+\\d+\\))?/\\d+)");
+  public static final Pattern HIVE_BEELINE_COMPLETE = Pattern.compile("VERTICES: .* (\\d+%)");
   /**
    * Pig on Tez produces progress report that looks like this
   * DAG Status: status=RUNNING, progress=TotalTasks: 3 Succeeded: 0 Running: 0 Failed: 0 Killed: 0
@@ -139,6 +140,11 @@ public class TempletonUtils {
     if (pig.find())
       return pig.group().trim();
 
+    Matcher beeline = HIVE_BEELINE_COMPLETE.matcher(line);
+    if (beeline.find()) {
+      return beeline.group(1).trim() + " complete";
+    }
+
     Matcher hive = HIVE_COMPLETE.matcher(line);
     if(hive.find()) {
       return "map " + hive.group(1) + " reduce " + hive.group(2);
diff --git a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
index 51ed86741e5..cf48221c81d 100644
--- a/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
+++ b/hcatalog/webhcat/svr/src/test/java/org/apache/hive/hcatalog/templeton/tool/TestTempletonUtils.java
@@ -90,6 +90,9 @@ public class TestTempletonUtils {
 
    String fifty = "2011-12-15 18:12:36,333 [main] INFO  org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MapReduceLauncher - 50% complete";
    Assert.assertEquals("50% complete", TempletonUtils.extractPercentComplete(fifty));
+
+    String beeline = "VERTICES: 01/02  [==========================>>] 70%  ELAPSED TIME: 3.79 s";
+    Assert.assertEquals("70% complete", TempletonUtils.extractPercentComplete(beeline));
   }
 
   @Test
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index 90c6d22252d..e0f8b5251be 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -334,7 +334,7 @@ public enum ErrorMsg {
       "A column on which a partition/table is list bucketed cannot be 
truncated."),
 
   TABLE_NOT_PARTITIONED(10241, "Table {0} is not a partitioned table", true),
-  DATABSAE_ALREADY_EXISTS(10242, "Database {0} already exists", true),
+  DATABASE_ALREADY_EXISTS(10242, "Database {0} already exists", true),
  CANNOT_REPLACE_COLUMNS(10243, "Replace columns is not supported for table {0}. SerDe may be incompatible.", true),
  BAD_LOCATION_VALUE(10244, "{0}  is not absolute.  Please specify a complete absolute uri."),
   UNSUPPORTED_ALTER_TBL_OP(10245, "{0} alter table options is not supported"),
@@ -467,7 +467,8 @@ public enum ErrorMsg {
     "insert-only transactional", true),
  LOAD_DATA_LAUNCH_JOB_IO_ERROR(10415, "Encountered I/O error while parsing rewritten load data into insert query"),
  LOAD_DATA_LAUNCH_JOB_PARSE_ERROR(10416, "Encountered parse error while parsing rewritten load data into insert query"),
-
+  RESOURCE_PLAN_ALREADY_EXISTS(10417, "Resource plan {0} already exists", true),
+  RESOURCE_PLAN_NOT_EXISTS(10418, "Resource plan {0} does not exist", true),
 
  //========================== 20000 range starts here ========================//
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
index 815916c0926..f008e61b8ed 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
@@ -677,7 +677,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
  private int createResourcePlan(Hive db, CreateResourcePlanDesc createResourcePlanDesc)
       throws HiveException {
     db.createResourcePlan(createResourcePlanDesc.getResourcePlan(),
-        createResourcePlanDesc.getCopyFromName());
+        createResourcePlanDesc.getCopyFromName(), createResourcePlanDesc.getIfNotExists());
     return 0;
   }
 
@@ -787,7 +787,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
   }
 
  private int dropResourcePlan(Hive db, DropResourcePlanDesc desc) throws HiveException {
-    db.dropResourcePlan(desc.getRpName());
+    db.dropResourcePlan(desc.getRpName(), desc.getIfExists());
     return 0;
   }
 
@@ -4849,7 +4849,7 @@ public class DDLTask extends Task<DDLWork> implements Serializable {
     }
     catch (AlreadyExistsException ex) {
       //it would be better if AlreadyExistsException had an errorCode field....
-      throw new HiveException(ex, ErrorMsg.DATABSAE_ALREADY_EXISTS, crtDb.getName());
+      throw new HiveException(ex, ErrorMsg.DATABASE_ALREADY_EXISTS, crtDb.getName());
     }
     return 0;
   }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
index 74dbdfb9a95..b9fe739b7b8 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
@@ -5402,10 +5402,14 @@ private void constructOneLBLocationMap(FileStatus fSta,
   }
 
 
-  public void createResourcePlan(WMResourcePlan resourcePlan, String copyFromName)
+  public void createResourcePlan(WMResourcePlan resourcePlan, String copyFromName, boolean ifNotExists)
       throws HiveException {
     try {
       getMSC().createResourcePlan(resourcePlan, copyFromName);
+    } catch (AlreadyExistsException e) {
+      if (!ifNotExists) {
+        throw new HiveException(e, ErrorMsg.RESOURCE_PLAN_ALREADY_EXISTS, resourcePlan.getName());
+      }
     } catch (Exception e) {
       throw new HiveException(e);
     }
@@ -5429,9 +5433,13 @@ private void constructOneLBLocationMap(FileStatus fSta,
     }
   }
 
-  public void dropResourcePlan(String rpName) throws HiveException {
+  public void dropResourcePlan(String rpName, boolean ifExists) throws HiveException {
     try {
       getMSC().dropResourcePlan(rpName);
+    } catch (NoSuchObjectException e) {
+      if (!ifExists) {
+        throw new HiveException(e, ErrorMsg.RESOURCE_PLAN_NOT_EXISTS, rpName);
+      }
     } catch (Exception e) {
       throw new HiveException(e);
     }
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
index 5f2a926cd56..647731629d0 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
@@ -907,6 +907,7 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
     String resourcePlanName = unescapeIdentifier(ast.getChild(0).getText());
     Integer queryParallelism = null;
     String likeName = null;
+    boolean ifNotExists = false;
     for (int i = 1; i < ast.getChildCount(); ++i) {
       Tree child = ast.getChild(i);
       switch (child.getType()) {
@@ -925,11 +926,14 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
          throw new SemanticException("Conflicting create arguments " + ast.toStringTree());
         }
         break;
+      case HiveParser.TOK_IFNOTEXISTS:
+        ifNotExists = true;
+        break;
      default: throw new SemanticException("Invalid create arguments " + ast.toStringTree());
       }
     }
     CreateResourcePlanDesc desc = new CreateResourcePlanDesc(
-        resourcePlanName, queryParallelism, likeName);
+        resourcePlanName, queryParallelism, likeName, ifNotExists);
     addServiceOutput();
    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(), desc)));
   }
@@ -1072,7 +1076,17 @@ public class DDLSemanticAnalyzer extends BaseSemanticAnalyzer {
      throw new SemanticException("Expected name in DROP RESOURCE PLAN statement");
     }
     String rpName = unescapeIdentifier(ast.getChild(0).getText());
-    DropResourcePlanDesc desc = new DropResourcePlanDesc(rpName);
+    boolean ifExists = false;
+    for (int i = 1; i < ast.getChildCount(); ++i) {
+      Tree child = ast.getChild(i);
+      switch (child.getType()) {
+      case HiveParser.TOK_IFEXISTS:
+        ifExists = true;
+        break;
+      default: throw new SemanticException("Invalid create arguments " + ast.toStringTree());
+      }
+    }
+    DropResourcePlanDesc desc = new DropResourcePlanDesc(rpName, ifExists);
     addServiceOutput();
     rootTasks.add(TaskFactory.get(
         new DDLWork(getInputs(), getOutputs(), desc)));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g b/ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g
index f8c47f972f2..0479c78f7bc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g
+++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/ResourcePlanParser.g
@@ -93,9 +93,9 @@ rpUnassignList
 createResourcePlanStatement
 @init { gParent.pushMsg("create resource plan statement", state); }
 @after { gParent.popMsg(state); }
-    : KW_CREATE KW_RESOURCE KW_PLAN (
-          (name=identifier KW_LIKE likeName=identifier -> ^(TOK_CREATE_RP $name ^(TOK_LIKERP $likeName)))
-        | (name=identifier (KW_WITH rpAssignList)? -> ^(TOK_CREATE_RP $name rpAssignList?))
+    : KW_CREATE KW_RESOURCE KW_PLAN ifNotExists? (
+          (name=identifier KW_LIKE likeName=identifier -> ^(TOK_CREATE_RP $name ifNotExists? ^(TOK_LIKERP $likeName)))
+        | (name=identifier (KW_WITH rpAssignList)? -> ^(TOK_CREATE_RP $name ifNotExists? rpAssignList?))
       )
     ;
 
@@ -140,7 +140,7 @@ replaceResourcePlanStatement
 dropResourcePlanStatement
 @init { gParent.pushMsg("drop resource plan statement", state); }
 @after { gParent.popMsg(state); }
-    : KW_DROP KW_RESOURCE KW_PLAN name=identifier -> ^(TOK_DROP_RP $name)
+    : KW_DROP KW_RESOURCE KW_PLAN ifExists? name=identifier -> ^(TOK_DROP_RP $name ifExists?)
     ;
 
 poolPath
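[Editor's note: for reference, the amended grammar accepts statements of this shape (a sketch with arbitrary plan names; the same forms are exercised by resourceplan.q further down):

    CREATE RESOURCE PLAN IF NOT EXISTS plan_a WITH QUERY_PARALLELISM=2;
    DROP RESOURCE PLAN IF EXISTS plan_a;
]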
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateResourcePlanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateResourcePlanDesc.java
index f645aa214e9..9c18f59d096 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateResourcePlanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/CreateResourcePlanDesc.java
@@ -28,17 +28,20 @@ public class CreateResourcePlanDesc extends DDLDesc implements Serializable {
 
   private WMResourcePlan resourcePlan;
   private String copyFromName;
+  private boolean ifNotExists;
 
   // For serialization only.
   public CreateResourcePlanDesc() {
   }
 
-  public CreateResourcePlanDesc(String planName, Integer queryParallelism, String copyFromName) {
+  public CreateResourcePlanDesc(String planName, Integer queryParallelism, String copyFromName,
+      boolean ifNotExists) {
     resourcePlan = new WMResourcePlan(planName);
     if (queryParallelism != null) {
       resourcePlan.setQueryParallelism(queryParallelism);
     }
     this.copyFromName = copyFromName;
+    this.ifNotExists = ifNotExists;
   }
 
   @Explain(displayName="resourcePlan", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
@@ -50,4 +53,10 @@ public class CreateResourcePlanDesc extends DDLDesc 
implements Serializable {
   public String getCopyFromName() {
     return copyFromName;
   }
+
+  @Explain(displayName="If not exists", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED },
+      displayOnlyOnTrue = true)
+  public boolean getIfNotExists() {
+    return ifNotExists;
+  }
 }
\ No newline at end of file
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropResourcePlanDesc.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/DropResourcePlanDesc.java
index b67472de325..efaf0789b0d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/DropResourcePlanDesc.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/DropResourcePlanDesc.java
@@ -27,11 +27,13 @@ public class DropResourcePlanDesc extends DDLDesc implements Serializable {
   private static final long serialVersionUID = 1258596919510047766L;
 
   private String rpName;
+  private boolean ifExists;
 
   public DropResourcePlanDesc() {}
 
-  public DropResourcePlanDesc(String rpName) {
+  public DropResourcePlanDesc(String rpName, boolean ifExists) {
     this.setRpName(rpName);
+    this.setIfExists(ifExists);
   }
 
   @Explain(displayName="resourcePlanName", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED })
@@ -39,8 +41,18 @@ public class DropResourcePlanDesc extends DDLDesc implements 
Serializable {
     return rpName;
   }
 
+  @Explain(displayName="ifExists", explainLevels = { Level.USER, 
Level.DEFAULT, Level.EXTENDED },
+      displayOnlyOnTrue = true)
+  public boolean getIfExists() {
+    return ifExists;
+  }
+
   public void setRpName(String rpName) {
     this.rpName = rpName;
   }
 
+  public void setIfExists(boolean ifExists) {
+    this.ifExists = ifExists;
+  }
+
 }
\ No newline at end of file
diff --git a/ql/src/test/queries/clientpositive/resourceplan.q b/ql/src/test/queries/clientpositive/resourceplan.q
index ecaa16533c5..3d4aa4028cc 100644
--- a/ql/src/test/queries/clientpositive/resourceplan.q
+++ b/ql/src/test/queries/clientpositive/resourceplan.q
@@ -34,6 +34,11 @@ SHOW RESOURCE PLANS;
 SHOW RESOURCE PLAN plan_2;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
 
+-- Create plan with existing name, should fail
+CREATE RESOURCE PLAN plan_2;
+-- Create plan with existing name with IF NOT EXISTS
+CREATE RESOURCE PLAN IF NOT EXISTS plan_2;
+
 -- Should fail cannot set pool in create.
 CREATE RESOURCE PLAN plan_3 WITH QUERY_PARALLELISM=5, DEFAULT POOL = `all`;
 
@@ -138,6 +143,11 @@ DROP RESOURCE PLAN plan_2;
 DROP RESOURCE PLAN plan_3;
 SELECT * FROM SYS.WM_RESOURCEPLANS;
 
+-- Drop non existing resource plan, should fail
+DROP RESOURCE PLAN plan_99999;
+-- Drop non existing resource plan with IF EXISTS
+DROP RESOURCE PLAN IF EXISTS plan_99999;
+
 -- Use reserved keyword table as name.
 CREATE RESOURCE PLAN `table`;
 ALTER RESOURCE PLAN `table` SET QUERY_PARALLELISM = 1;
diff --git a/ql/src/test/results/clientpositive/llap/resourceplan.q.out b/ql/src/test/results/clientpositive/llap/resourceplan.q.out
index d88473429e2..d7e879c9dae 100644
--- a/ql/src/test/results/clientpositive/llap/resourceplan.q.out
+++ b/ql/src/test/results/clientpositive/llap/resourceplan.q.out
@@ -3231,6 +3231,15 @@ POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_1 DISABLED        NULL    default
 plan_2 DISABLED        10      default
+PREHOOK: query: CREATE RESOURCE PLAN plan_2
+PREHOOK: type: CREATE RESOURCEPLAN
+PREHOOK: Output: dummyHostnameForTest
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Resource plan plan_2 already exists
+PREHOOK: query: CREATE RESOURCE PLAN IF NOT EXISTS plan_2
+PREHOOK: type: CREATE RESOURCEPLAN
+PREHOOK: Output: dummyHostnameForTest
+POSTHOOK: query: CREATE RESOURCE PLAN IF NOT EXISTS plan_2
+POSTHOOK: type: CREATE RESOURCEPLAN
FAILED: SemanticException Invalid create arguments (tok_create_rp plan_3 (tok_query_parallelism 5) (tok_default_pool all))
 PREHOOK: query: ALTER RESOURCE PLAN plan_1 RENAME TO plan_2
 PREHOOK: type: ALTER RESOURCEPLAN
@@ -3548,6 +3557,15 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 ACTIVE  10      default
+PREHOOK: query: DROP RESOURCE PLAN plan_99999
+PREHOOK: type: DROP RESOURCEPLAN
+PREHOOK: Output: dummyHostnameForTest
+FAILED: Execution Error, return code 1 from org.apache.hadoop.hive.ql.exec.DDLTask. Resource plan plan_99999 does not exist
+PREHOOK: query: DROP RESOURCE PLAN IF EXISTS plan_99999
+PREHOOK: type: DROP RESOURCEPLAN
+PREHOOK: Output: dummyHostnameForTest
+POSTHOOK: query: DROP RESOURCE PLAN IF EXISTS plan_99999
+POSTHOOK: type: DROP RESOURCEPLAN
 PREHOOK: query: CREATE RESOURCE PLAN `table`
 PREHOOK: type: CREATE RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
diff --git a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
index 8ba2d1892bf..1835af00667 100644
--- a/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
+++ b/service/src/java/org/apache/hive/service/cli/thrift/ThriftCLIService.java
@@ -428,7 +428,11 @@ public abstract class ThriftCLIService extends AbstractService implements TCLISe
       userName = req.getUsername();
     }
 
-    userName = getShortName(userName);
+    if (cliService.getHiveConf().getBoolVar(ConfVars.HIVE_AUTHORIZATION_KERBEROS_USE_SHORTNAME))
+    {
+      userName = getShortName(userName);
+    }
+
     String effectiveClientUser = getProxyUser(userName, req.getConfiguration(), getIpAddress());
     LOG.debug("Client's username: {}", effectiveClientUser);
     return effectiveClientUser;

