Modified: 
hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- 
hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
 (original)
+++ 
hive/trunk/itests/hive-unit/src/test/java/org/apache/hadoop/hive/jdbc/TestJdbcDriver.java
 Sun Jul 13 18:54:08 2014
@@ -19,7 +19,7 @@
 package org.apache.hadoop.hive.jdbc;
 
 import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME;
-import static 
org.apache.hadoop.hive.ql.processors.SetProcessor.SET_COLUMN_NAME;
+import static org.apache.hadoop.hive.conf.SystemVariables.SET_COLUMN_NAME;
 
 import java.sql.Connection;
 import java.sql.DatabaseMetaData;

Modified: 
hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- 
hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
 (original)
+++ 
hive/trunk/itests/hive-unit/src/test/java/org/apache/hive/jdbc/TestJdbcDriver2.java
 Sun Jul 13 18:54:08 2014
@@ -19,7 +19,7 @@
 package org.apache.hive.jdbc;
 
 import static org.apache.hadoop.hive.ql.exec.ExplainTask.EXPL_COLUMN_NAME;
-import static 
org.apache.hadoop.hive.ql.processors.SetProcessor.SET_COLUMN_NAME;
+import static org.apache.hadoop.hive.conf.SystemVariables.SET_COLUMN_NAME;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
@@ -1888,7 +1888,7 @@ public class TestJdbcDriver2 {
    */
   @Test
   public void testFetchFirstSetCmds() throws Exception {
-    execFetchFirst("set -v", SetProcessor.SET_COLUMN_NAME, false);
+    execFetchFirst("set -v", SET_COLUMN_NAME, false);
   }
 
   /**

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/exec/DDLTask.java Sun Jul 
13 18:54:08 2014
@@ -150,6 +150,7 @@ import org.apache.hadoop.hive.ql.plan.Re
 import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
 import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowCompactionsDesc;
+import org.apache.hadoop.hive.ql.plan.ShowConfDesc;
 import org.apache.hadoop.hive.ql.plan.ShowCreateTableDesc;
 import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
@@ -430,6 +431,11 @@ public class DDLTask extends Task<DDLWor
         return showCreateTable(db, showCreateTbl);
       }
 
+      ShowConfDesc showConf = work.getShowConfDesc();
+      if (showConf != null) {
+        return showConf(db, showConf);
+      }
+
       RoleDDLDesc roleDDLDesc = work.getRoleDDLDesc();
       if (roleDDLDesc != null) {
         return roleDDL(roleDDLDesc);
@@ -492,6 +498,38 @@ public class DDLTask extends Task<DDLWor
     return 0;
   }
 
+  private int showConf(Hive db, ShowConfDesc showConf) throws Exception {
+    ConfVars conf = HiveConf.getConfVars(showConf.getConfName());
+    if (conf == null) {
+      throw new HiveException("invalid configuration name " + 
showConf.getConfName());
+    }
+    String description = conf.getDescription();
+    String defaltValue = conf.getDefaultValue();
+    DataOutputStream output = getOutputStream(showConf.getResFile());
+    try {
+      if (description != null) {
+        if (defaltValue != null) {
+          output.write(defaltValue.getBytes());
+        }
+        output.write(separator);
+        output.write(conf.typeString().getBytes());
+        output.write(separator);
+        if (description != null) {
+          output.write(description.replaceAll(" *\n *", " ").getBytes());
+        }
+        output.write(terminator);
+      }
+    } finally {
+      output.close();
+    }
+    return 0;
+  }
+
+  private DataOutputStream getOutputStream(Path outputFile) throws Exception {
+    FileSystem fs = outputFile.getFileSystem(conf);
+    return fs.create(outputFile);
+  }
+
   /**
    * First, make sure the source table/partition is not
    * archived/indexes/non-rcfile. If either of these is true, throw an

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcFile.java Sun 
Jul 13 18:54:08 2014
@@ -26,6 +26,8 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 
+import static org.apache.hadoop.hive.conf.HiveConf.ConfVars.*;
+
 /**
  * Contains factory methods to read or write ORC files.
  */
@@ -233,29 +235,13 @@ public final class OrcFile {
     WriterOptions(Configuration conf) {
       configuration = conf;
       memoryManagerValue = getMemoryManager(conf);
-      stripeSizeValue =
-          conf.getLong(HiveConf.ConfVars.HIVE_ORC_DEFAULT_STRIPE_SIZE.varname,
-              HiveConf.ConfVars.HIVE_ORC_DEFAULT_STRIPE_SIZE.defaultLongVal);
-      blockSizeValue =
-          conf.getLong(HiveConf.ConfVars.HIVE_ORC_DEFAULT_BLOCK_SIZE.varname,
-              HiveConf.ConfVars.HIVE_ORC_DEFAULT_BLOCK_SIZE.defaultLongVal);
-      rowIndexStrideValue =
-          conf.getInt(HiveConf.ConfVars.HIVE_ORC_DEFAULT_ROW_INDEX_STRIDE
-              .varname, 
HiveConf.ConfVars.HIVE_ORC_DEFAULT_ROW_INDEX_STRIDE.defaultIntVal);
-      bufferSizeValue =
-          conf.getInt(HiveConf.ConfVars.HIVE_ORC_DEFAULT_BUFFER_SIZE.varname,
-              HiveConf.ConfVars.HIVE_ORC_DEFAULT_BUFFER_SIZE.defaultIntVal);
-      blockPaddingValue =
-          conf.getBoolean(HiveConf.ConfVars.HIVE_ORC_DEFAULT_BLOCK_PADDING
-              .varname, HiveConf.ConfVars.HIVE_ORC_DEFAULT_BLOCK_PADDING
-              .defaultBoolVal);
-      compressValue = 
-          CompressionKind.valueOf(conf.get(HiveConf.ConfVars
-              .HIVE_ORC_DEFAULT_COMPRESS.varname,
-              HiveConf.ConfVars
-              .HIVE_ORC_DEFAULT_COMPRESS.defaultVal));
-      String versionName =
-        conf.get(HiveConf.ConfVars.HIVE_ORC_WRITE_FORMAT.varname);
+      stripeSizeValue = HiveConf.getLongVar(conf, 
HIVE_ORC_DEFAULT_STRIPE_SIZE);
+      blockSizeValue = HiveConf.getLongVar(conf, HIVE_ORC_DEFAULT_BLOCK_SIZE);
+      rowIndexStrideValue = HiveConf.getIntVar(conf, 
HIVE_ORC_DEFAULT_ROW_INDEX_STRIDE);
+      bufferSizeValue = HiveConf.getIntVar(conf, HIVE_ORC_DEFAULT_BUFFER_SIZE);
+      blockPaddingValue = HiveConf.getBoolVar(conf, 
HIVE_ORC_DEFAULT_BLOCK_PADDING);
+      compressValue = CompressionKind.valueOf(HiveConf.getVar(conf, 
HIVE_ORC_DEFAULT_COMPRESS));
+      String versionName = HiveConf.getVar(conf, HIVE_ORC_WRITE_FORMAT);
       if (versionName == null) {
         versionValue = Version.CURRENT;
       } else {

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java 
(original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/DDLSemanticAnalyzer.java 
Sun Jul 13 18:54:08 2014
@@ -57,7 +57,6 @@ import org.apache.hadoop.hive.metastore.
 import org.apache.hadoop.hive.ql.Driver;
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.ArchiveUtils;
-import org.apache.hadoop.hive.ql.exec.DDLTask;
 import org.apache.hadoop.hive.ql.exec.FetchTask;
 import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
 import org.apache.hadoop.hive.ql.exec.Task;
@@ -117,6 +116,7 @@ import org.apache.hadoop.hive.ql.plan.Re
 import org.apache.hadoop.hive.ql.plan.RoleDDLDesc;
 import org.apache.hadoop.hive.ql.plan.ShowColumnsDesc;
 import org.apache.hadoop.hive.ql.plan.ShowCompactionsDesc;
+import org.apache.hadoop.hive.ql.plan.ShowConfDesc;
 import org.apache.hadoop.hive.ql.plan.ShowCreateTableDesc;
 import org.apache.hadoop.hive.ql.plan.ShowDatabasesDesc;
 import org.apache.hadoop.hive.ql.plan.ShowFunctionsDesc;
@@ -334,6 +334,10 @@ public class DDLSemanticAnalyzer extends
       ctx.setResFile(ctx.getLocalTmpPath());
       analyzeShowTxns(ast);
       break;
+    case HiveParser.TOK_SHOWCONF:
+      ctx.setResFile(ctx.getLocalTmpPath());
+      analyzeShowConf(ast);
+      break;
     case HiveParser.TOK_DESCFUNCTION:
       ctx.setResFile(ctx.getLocalTmpPath());
       analyzeDescFunction(ast);
@@ -2338,7 +2342,15 @@ public class DDLSemanticAnalyzer extends
     ctx.setNeedLockMgr(true);
   }
 
-   /**
+  private void analyzeShowConf(ASTNode ast) throws SemanticException {
+    String confName = stripQuotes(ast.getChild(0).getText());
+    ShowConfDesc showConfDesc = new ShowConfDesc(ctx.getResFile(), confName);
+    rootTasks.add(TaskFactory.get(new DDLWork(getInputs(), getOutputs(),
+        showConfDesc), conf));
+    setFetchTask(createFetchTask(showConfDesc.getSchema()));
+  }
+
+  /**
    * Add the task according to the parsed command tree. This is used for the 
CLI
    * command "LOCK TABLE ..;".
    *

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g Sun Jul 
13 18:54:08 2014
@@ -296,6 +296,7 @@ KW_COMPACTIONS: 'COMPACTIONS';
 KW_TRANSACTIONS: 'TRANSACTIONS';
 KW_REWRITE : 'REWRITE';
 KW_AUTHORIZATION: 'AUTHORIZATION';
+KW_CONF: 'CONF';
 
 // Operators
 // NOTE: if you add a new function/operator, add it to sysFuncNames so that 
describe function _FUNC_ will work.

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g Sun Jul 
13 18:54:08 2014
@@ -163,6 +163,7 @@ TOK_SHOW_CREATETABLE;
 TOK_SHOW_TABLESTATUS;
 TOK_SHOW_TBLPROPERTIES;
 TOK_SHOWLOCKS;
+TOK_SHOWCONF;
 TOK_LOCKTABLE;
 TOK_UNLOCKTABLE;
 TOK_LOCKDB;
@@ -1337,6 +1338,7 @@ showStatement
     -> ^(TOK_SHOWINDEXES showStmtIdentifier $showOptions? $db_name?)
     | KW_SHOW KW_COMPACTIONS -> ^(TOK_SHOW_COMPACTIONS)
     | KW_SHOW KW_TRANSACTIONS -> ^(TOK_SHOW_TRANSACTIONS)
+    | KW_SHOW KW_CONF StringLiteral -> ^(TOK_SHOWCONF StringLiteral)
     ;
 
 lockStatement

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
 (original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzerFactory.java
 Sun Jul 13 18:54:08 2014
@@ -69,6 +69,7 @@ public final class SemanticAnalyzerFacto
     commandType.put(HiveParser.TOK_SHOWPARTITIONS, 
HiveOperation.SHOWPARTITIONS);
     commandType.put(HiveParser.TOK_SHOWLOCKS, HiveOperation.SHOWLOCKS);
     commandType.put(HiveParser.TOK_SHOWDBLOCKS, HiveOperation.SHOWLOCKS);
+    commandType.put(HiveParser.TOK_SHOWCONF, HiveOperation.SHOWCONF);
     commandType.put(HiveParser.TOK_CREATEFUNCTION, 
HiveOperation.CREATEFUNCTION);
     commandType.put(HiveParser.TOK_DROPFUNCTION, HiveOperation.DROPFUNCTION);
     commandType.put(HiveParser.TOK_CREATEMACRO, HiveOperation.CREATEMACRO);
@@ -203,6 +204,7 @@ public final class SemanticAnalyzerFacto
       case HiveParser.TOK_SHOWDBLOCKS:
       case HiveParser.TOK_SHOW_COMPACTIONS:
       case HiveParser.TOK_SHOW_TRANSACTIONS:
+      case HiveParser.TOK_SHOWCONF:
       case HiveParser.TOK_CREATEINDEX:
       case HiveParser.TOK_DROPINDEX:
       case HiveParser.TOK_ALTERTABLE_CLUSTER_SORT:

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java
 (original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/parse/VariableSubstitution.java
 Sun Jul 13 18:54:08 2014
@@ -17,79 +17,44 @@
  */
 package org.apache.hadoop.hive.ql.parse;
 
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.hive.ql.processors.SetProcessor;
 import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.conf.SystemVariables;
+
+import java.util.Map;
 
-public class VariableSubstitution {
+public class VariableSubstitution extends SystemVariables {
 
   private static final Log l4j = LogFactory.getLog(VariableSubstitution.class);
-  protected static Pattern varPat = 
Pattern.compile("\\$\\{[^\\}\\$\u0020]+\\}");
 
-  private String getSubstitute(HiveConf conf, String var) {
-    String val = null;
-    try {
-      if (var.startsWith(SetProcessor.SYSTEM_PREFIX)) {
-        val = 
System.getProperty(var.substring(SetProcessor.SYSTEM_PREFIX.length()));
-      }
-    } catch(SecurityException se) {
-      l4j.warn("Unexpected SecurityException in Configuration", se);
-    }
-    if (val ==null){
-      if (var.startsWith(SetProcessor.ENV_PREFIX)){
-        val = System.getenv(var.substring(SetProcessor.ENV_PREFIX.length()));
-      }
-    }
-    if (val == null) {
-      if (var.startsWith(SetProcessor.HIVECONF_PREFIX)){
-        val = conf.get(var.substring(SetProcessor.HIVECONF_PREFIX.length()));
-      }
-    }
-    if (val ==null){
-      if(var.startsWith(SetProcessor.HIVEVAR_PREFIX)){
-        val =  
SessionState.get().getHiveVariables().get(var.substring(SetProcessor.HIVEVAR_PREFIX.length()));
+  @Override
+  protected String getSubstitute(Configuration conf, String var) {
+    String val = super.getSubstitute(conf, var);
+    if (val == null && SessionState.get() != null) {
+      Map<String,String> vars = SessionState.get().getHiveVariables();
+      if (var.startsWith(HIVEVAR_PREFIX)) {
+        val =  vars.get(var.substring(HIVEVAR_PREFIX.length()));
       } else {
-        val = SessionState.get().getHiveVariables().get(var);
+        val = vars.get(var);
       }
     }
     return val;
   }
 
-  public String substitute (HiveConf conf, String expr) {
-
-    if (conf.getBoolVar(ConfVars.HIVEVARIABLESUBSTITUTE)){
-      l4j.debug("Substitution is on: "+expr);
-    } else {
-      return expr;
-    }
+  public String substitute(HiveConf conf, String expr) {
     if (expr == null) {
-      return null;
+      return expr;
     }
-    Matcher match = varPat.matcher("");
-    String eval = expr;
-    for(int s=0;s<conf.getIntVar(ConfVars.HIVEVARIABLESUBSTITUTEDEPTH); s++) {
-      match.reset(eval);
-      if (!match.find()) {
-        return eval;
-      }
-      String var = match.group();
-      var = var.substring(2, var.length()-1); // remove ${ .. }
-      String val = getSubstitute(conf, var);
-
-      if (val == null) {
-        l4j.debug("Interpolation result: "+eval);
-        return eval; // return literal, no substitution found
-      }
-      // substitute
-      eval = eval.substring(0, match.start())+val+eval.substring(match.end());
+    if (HiveConf.getBoolVar(conf, ConfVars.HIVEVARIABLESUBSTITUTE)) {
+      l4j.debug("Substitution is on: " + expr);
+    } else {
+      return expr;
     }
-    throw new IllegalStateException("Variable substitution depth too large: "
-                                    + 
conf.getIntVar(ConfVars.HIVEVARIABLESUBSTITUTEDEPTH) + " " + expr);
+    int depth = HiveConf.getIntVar(conf, ConfVars.HIVEVARIABLESUBSTITUTEDEPTH);
+    return substitute(conf, expr, depth);
   }
 }

Modified: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/DDLWork.java Sun Jul 
13 18:54:08 2014
@@ -76,6 +76,8 @@ public class DDLWork implements Serializ
   private RevokeDesc revokeDesc;
   private GrantRevokeRoleDDL grantRevokeRoleDDL;
 
+  private ShowConfDesc showConfDesc;
+
   boolean needLock = false;
 
   /**
@@ -139,6 +141,12 @@ public class DDLWork implements Serializ
     this.truncateTblDesc = truncateTblDesc;
   }
 
  /**
   * Constructs DDLWork carrying a SHOW CONF descriptor.
   *
   * @param inputs       read entities, for authorization and lineage
   * @param outputs      write entities, for authorization and lineage
   * @param showConfDesc descriptor naming the configuration property to show
   */
  public DDLWork(HashSet<ReadEntity> inputs, HashSet<WriteEntity> outputs,
      ShowConfDesc showConfDesc) {
    this(inputs, outputs);
    this.showConfDesc = showConfDesc;
  }
+
   public DescDatabaseDesc getDescDatabaseDesc() {
     return descDbDesc;
   }
@@ -1117,4 +1125,12 @@ public class DDLWork implements Serializ
       AlterTableExchangePartition alterTableExchangePartition) {
     this.alterTableExchangePartition = alterTableExchangePartition;
   }
+
  /** @return the SHOW CONF descriptor, or null if this work is not a SHOW CONF. */
  public ShowConfDesc getShowConfDesc() {
    return showConfDesc;
  }

  /** Sets the SHOW CONF descriptor carried by this work. */
  public void setShowConfDesc(ShowConfDesc showConfDesc) {
    this.showConfDesc = showConfDesc;
  }
 }

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java 
(original)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/HiveOperation.java 
Sun Jul 13 18:54:08 2014
@@ -67,6 +67,7 @@ public enum HiveOperation {
   SHOWINDEXES("SHOWINDEXES", null, null),
   SHOWPARTITIONS("SHOWPARTITIONS", null, null),
   SHOWLOCKS("SHOWLOCKS", null, null),
+  SHOWCONF("SHOWCONF", null, null),
   CREATEFUNCTION("CREATEFUNCTION", null, null),
   DROPFUNCTION("DROPFUNCTION", null, null),
   CREATEMACRO("CREATEMACRO", null, null),

Added: hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java?rev=1610279&view=auto
==============================================================================
--- hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java 
(added)
+++ hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/plan/ShowConfDesc.java Sun 
Jul 13 18:54:08 2014
@@ -0,0 +1,61 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hive.ql.plan;

import org.apache.hadoop.fs.Path;

import java.io.Serializable;

/**
 * Descriptor for the SHOW CONF &lt;name&gt; DDL statement: holds the
 * requested configuration property name and the path of the file the
 * single result row is written to.
 */
public class ShowConfDesc extends DDLDesc implements Serializable {
  private static final long serialVersionUID = 1L;

  // Column names/types of the result row: default value, type, description.
  private static final String schema = "default,type,desc#string,string,string";

  private Path resFile;
  private String confName;

  public ShowConfDesc() {
  }

  public ShowConfDesc(Path resFile, String confName) {
    this.resFile = resFile;
    this.confName = confName;
  }

  public String getSchema() {
    return schema;
  }

  @Explain(displayName = "result file", normalExplain = false)
  public Path getResFile() {
    return resFile;
  }

  public void setResFile(Path resFile) {
    this.resFile = resFile;
  }

  @Explain(displayName = "conf name", normalExplain = false)
  public String getConfName() {
    return confName;
  }

  public void setConfName(String confName) {
    this.confName = confName;
  }
}

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java 
(original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/processors/SetProcessor.java 
Sun Jul 13 18:54:08 2014
@@ -22,6 +22,8 @@ import static org.apache.hadoop.hive.ser
 import static org.apache.hadoop.hive.serde.serdeConstants.STRING_TYPE_NAME;
 import static 
org.apache.hadoop.hive.serde2.MetadataTypedColumnsetSerDe.defaultNullString;
 
+import static org.apache.hadoop.hive.conf.SystemVariables.*;
+
 import java.util.Map;
 import java.util.Properties;
 import java.util.SortedMap;
@@ -39,12 +41,7 @@ import org.apache.hadoop.hive.ql.session
  */
 public class SetProcessor implements CommandProcessor {
 
-  private static String prefix = "set: ";
-  public static final String ENV_PREFIX = "env:";
-  public static final String SYSTEM_PREFIX = "system:";
-  public static final String HIVECONF_PREFIX = "hiveconf:";
-  public static final String HIVEVAR_PREFIX = "hivevar:";
-  public static final String SET_COLUMN_NAME = "set";
+  private static final String prefix = "set: ";
 
   public static boolean getBoolean(String value) {
     if (value.equals("on") || value.equals("true")) {
@@ -69,7 +66,7 @@ public class SetProcessor implements Com
 
     // Inserting hive variables
     for (String s : ss.getHiveVariables().keySet()) {
-      sortedMap.put(SetProcessor.HIVEVAR_PREFIX + s, 
ss.getHiveVariables().get(s));
+      sortedMap.put(HIVEVAR_PREFIX + s, ss.getHiveVariables().get(s));
     }
 
     for (Map.Entry<String, String> entries : sortedMap.entrySet()) {
@@ -117,17 +114,17 @@ public class SetProcessor implements Com
       ss.err.println("Warning: Value had a \\n character in it.");
     }
     varname = varname.trim();
-    if (varname.startsWith(SetProcessor.ENV_PREFIX)){
+    if (varname.startsWith(ENV_PREFIX)){
       ss.err.println("env:* variables can not be set.");
       return 1;
-    } else if (varname.startsWith(SetProcessor.SYSTEM_PREFIX)){
-      String propName = varname.substring(SetProcessor.SYSTEM_PREFIX.length());
+    } else if (varname.startsWith(SYSTEM_PREFIX)){
+      String propName = varname.substring(SYSTEM_PREFIX.length());
       System.getProperties().setProperty(propName, new 
VariableSubstitution().substitute(ss.getConf(),varvalue));
-    } else if (varname.startsWith(SetProcessor.HIVECONF_PREFIX)){
-      String propName = 
varname.substring(SetProcessor.HIVECONF_PREFIX.length());
+    } else if (varname.startsWith(HIVECONF_PREFIX)){
+      String propName = varname.substring(HIVECONF_PREFIX.length());
       setConf(varname, propName, varvalue, false);
-    } else if (varname.startsWith(SetProcessor.HIVEVAR_PREFIX)) {
-      String propName = 
varname.substring(SetProcessor.HIVEVAR_PREFIX.length());
+    } else if (varname.startsWith(HIVEVAR_PREFIX)) {
+      String propName = varname.substring(HIVEVAR_PREFIX.length());
       ss.getHiveVariables().put(propName, new 
VariableSubstitution().substitute(ss.getConf(),varvalue));
     } else {
       setConf(varname, varname, varvalue, true);
@@ -169,7 +166,7 @@ public class SetProcessor implements Com
 
   private SortedMap<String,String> propertiesToSortedMap(Properties p){
     SortedMap<String,String> sortedPropMap = new TreeMap<String,String>();
-    for (Map.Entry<Object, Object> entry :System.getProperties().entrySet() ){
+    for (Map.Entry<Object, Object> entry : p.entrySet() ){
       sortedPropMap.put( (String) entry.getKey(), (String) entry.getValue());
     }
     return sortedPropMap;
@@ -188,38 +185,38 @@ public class SetProcessor implements Com
       ss.out.println("silent" + "=" + ss.getIsSilent());
       return createProcessorSuccessResponse();
     }
-    if (varname.startsWith(SetProcessor.SYSTEM_PREFIX)){
-      String propName = varname.substring(SetProcessor.SYSTEM_PREFIX.length());
+    if (varname.startsWith(SYSTEM_PREFIX)) {
+      String propName = varname.substring(SYSTEM_PREFIX.length());
       String result = System.getProperty(propName);
-      if (result != null){
-        ss.out.println(SetProcessor.SYSTEM_PREFIX+propName + "=" + result);
+      if (result != null) {
+        ss.out.println(SYSTEM_PREFIX + propName + "=" + result);
         return createProcessorSuccessResponse();
       } else {
-        ss.out.println( propName + " is undefined as a system property");
+        ss.out.println(propName + " is undefined as a system property");
         return new CommandProcessorResponse(1);
       }
-    } else if (varname.indexOf(SetProcessor.ENV_PREFIX)==0){
+    } else if (varname.indexOf(ENV_PREFIX) == 0) {
       String var = varname.substring(ENV_PREFIX.length());
-      if (System.getenv(var)!=null){
-        ss.out.println(SetProcessor.ENV_PREFIX+var + "=" + System.getenv(var));
+      if (System.getenv(var) != null) {
+        ss.out.println(ENV_PREFIX + var + "=" + System.getenv(var));
         return createProcessorSuccessResponse();
       } else {
         ss.out.println(varname + " is undefined as an environmental variable");
         return new CommandProcessorResponse(1);
       }
-    } else if (varname.indexOf(SetProcessor.HIVECONF_PREFIX)==0) {
-      String var = varname.substring(SetProcessor.HIVECONF_PREFIX.length());
-      if (ss.getConf().get(var)!=null){
-        ss.out.println(SetProcessor.HIVECONF_PREFIX+var + "=" + 
ss.getConf().get(var));
+    } else if (varname.indexOf(HIVECONF_PREFIX) == 0) {
+      String var = varname.substring(HIVECONF_PREFIX.length());
+      if (ss.getConf().get(var) != null) {
+        ss.out.println(HIVECONF_PREFIX + var + "=" + ss.getConf().get(var));
         return createProcessorSuccessResponse();
       } else {
         ss.out.println(varname + " is undefined as a hive configuration 
variable");
         return new CommandProcessorResponse(1);
       }
-    } else if (varname.indexOf(SetProcessor.HIVEVAR_PREFIX)==0) {
-      String var = varname.substring(SetProcessor.HIVEVAR_PREFIX.length());
-      if (ss.getHiveVariables().get(var)!=null){
-        ss.out.println(SetProcessor.HIVEVAR_PREFIX+var + "=" + 
ss.getHiveVariables().get(var));
+    } else if (varname.indexOf(HIVEVAR_PREFIX) == 0) {
+      String var = varname.substring(HIVEVAR_PREFIX.length());
+      if (ss.getHiveVariables().get(var) != null) {
+        ss.out.println(HIVEVAR_PREFIX + var + "=" + 
ss.getHiveVariables().get(var));
         return createProcessorSuccessResponse();
       } else {
         ss.out.println(varname + " is undefined as a hive variable");

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
 (original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveOperationType.java
 Sun Jul 13 18:54:08 2014
@@ -69,6 +69,7 @@ public enum HiveOperationType {
   SHOWINDEXES,
   SHOWPARTITIONS,
   SHOWLOCKS,
+  SHOWCONF,
   CREATEFUNCTION,
   DROPFUNCTION,
   CREATEMACRO,

Modified: 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
 (original)
+++ 
hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/sqlstd/Operation2Privilege.java
 Sun Jul 13 18:54:08 2014
@@ -46,7 +46,7 @@ public class Operation2Privilege {
 
 
     private PrivRequirement(SQLPrivTypeGrant[] privs, IOType ioType) {
-      this(privs, ioType, (HivePrivObjectActionType) null);
+      this(privs, ioType, null);
     }
 
     private PrivRequirement(SQLPrivTypeGrant[] privs, IOType ioType,
@@ -291,6 +291,8 @@ public class Operation2Privilege {
 (null, null));
     op2Priv.put(HiveOperationType.SHOW_TRANSACTIONS, 
PrivRequirement.newIOPrivRequirement
 (null, null));
+    op2Priv.put(HiveOperationType.SHOWCONF, 
PrivRequirement.newIOPrivRequirement
+(null, null));
 
     op2Priv.put(HiveOperationType.LOCKTABLE, 
PrivRequirement.newIOPrivRequirement
 (null, null));

Added: hive/trunk/ql/src/test/queries/clientpositive/show_conf.q
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/queries/clientpositive/show_conf.q?rev=1610279&view=auto
==============================================================================
--- hive/trunk/ql/src/test/queries/clientpositive/show_conf.q (added)
+++ hive/trunk/ql/src/test/queries/clientpositive/show_conf.q Sun Jul 13 
18:54:08 2014
@@ -0,0 +1,3 @@
+show conf "hive.auto.convert.sortmerge.join.to.mapjoin";
+
+show conf "hive.stats.retries.wait";

Added: hive/trunk/ql/src/test/results/clientpositive/show_conf.q.out
URL: 
http://svn.apache.org/viewvc/hive/trunk/ql/src/test/results/clientpositive/show_conf.q.out?rev=1610279&view=auto
==============================================================================
--- hive/trunk/ql/src/test/results/clientpositive/show_conf.q.out (added)
+++ hive/trunk/ql/src/test/results/clientpositive/show_conf.q.out Sun Jul 13 
18:54:08 2014
@@ -0,0 +1,10 @@
+PREHOOK: query: show conf "hive.auto.convert.sortmerge.join.to.mapjoin"
+PREHOOK: type: SHOWCONF
+POSTHOOK: query: show conf "hive.auto.convert.sortmerge.join.to.mapjoin"
+POSTHOOK: type: SHOWCONF
+false  BOOLEAN If hive.auto.convert.sortmerge.join is set to true, and a join 
was converted to a sort-merge join, this parameter decides whether each table 
should be tried as a big table, and effectively a map-join should be tried. 
That would create a conditional task with n+1 children for a n-way join (1 
child for each table as the big table), and the backup task will be the 
sort-merge join. In some cases, a map-join would be faster than a sort-merge 
join, if there is no advantage of having the output bucketed and sorted. For 
example, if a very big sorted and bucketed table with few files (say 10 files) 
are being joined with a very small sorter and bucketed table with few files (10 
files), the sort-merge join will only use 10 mappers, and a simple map-only 
join might be faster if the complete small table can fit in memory, and a 
map-join can be performed.
+PREHOOK: query: show conf "hive.stats.retries.wait"
+PREHOOK: type: SHOWCONF
+POSTHOOK: query: show conf "hive.stats.retries.wait"
+POSTHOOK: type: SHOWCONF
+3000   INT     The base waiting window (in milliseconds) before the next 
retry. The actual wait time is calculated by baseWindow * failures + baseWindow * 
(failures + 1) * (random number between [0.0,1.0]).

Modified: 
hive/trunk/service/src/java/org/apache/hive/service/cli/CLIService.java
URL: 
http://svn.apache.org/viewvc/hive/trunk/service/src/java/org/apache/hive/service/cli/CLIService.java?rev=1610279&r1=1610278&r2=1610279&view=diff
==============================================================================
--- hive/trunk/service/src/java/org/apache/hive/service/cli/CLIService.java 
(original)
+++ hive/trunk/service/src/java/org/apache/hive/service/cli/CLIService.java Sun 
Jul 13 18:54:08 2014
@@ -35,6 +35,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.conf.SystemVariables;
 import org.apache.hadoop.hive.metastore.HiveMetaStoreClient;
 import org.apache.hadoop.hive.metastore.IMetaStoreClient;
 import org.apache.hadoop.hive.ql.metadata.Hive;
@@ -443,7 +444,10 @@ public class CLIService extends Composit
 
   // create the give Path if doesn't exists and make it writable
   private void setupStagingDir(String dirPath, boolean isLocal) throws 
IOException {
-    Path scratchDir = new Path(dirPath);
+    Path scratchDir = getStaticPath(new Path(dirPath));
+    if (scratchDir == null) {
+      return;
+    }
     FileSystem fs;
     if (isLocal) {
       fs = FileSystem.getLocal(hiveConf);
@@ -480,4 +484,16 @@ public class CLIService extends Composit
     sessionManager.getSession(sessionHandle).renewDelegationToken(authFactory, 
tokenStr);
     LOG.info(sessionHandle  + ": renewDelegationToken()");
   }
+
+  // DOWNLOADED_RESOURCES_DIR, for example, is by default
+  // ${system:java.io.tmpdir}/${hive.session.id}_resources;
+  // ${system:java.io.tmpdir} would already be evaluated here, but
+  // ${hive.session.id} would not be.
+  // For that case, this returns only the already-evaluated prefix — in this case, "/tmp".
+  // What about ${hive.session.id}_resources/${system:java.io.tmpdir}? Just
+  // don't do that.
+  private Path getStaticPath(Path path) {
+    Path current = path;
+    for (; current != null && SystemVariables.containsVar(current.getName());
+        current = current.getParent()) {
+    }
+    return current;
+  }
 }


Reply via email to