http://git-wip-us.apache.org/repos/asf/hive/blob/61ec445c/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
----------------------------------------------------------------------
diff --git a/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql b/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
index 3444439..d9606d8 100644
--- a/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
+++ b/metastore/scripts/upgrade/hive/hive-schema-3.0.0.hive.sql
@@ -96,7 +96,7 @@ TBLPROPERTIES (
   \"OWNER_NAME\",
   \"OWNER_TYPE\"
 FROM
-  DBS"
+  \"DBS\""
 );
 
 CREATE TABLE IF NOT EXISTS `DB_PRIVS` (
@@ -605,7 +605,7 @@ TBLPROPERTIES (
   \"VIEW_EXPANDED_TEXT\",
   \"VIEW_ORIGINAL_TEXT\",
   \"IS_REWRITE_ENABLED\"
-FROM TBLS"
+FROM \"TBLS\""
 );
 
 CREATE TABLE IF NOT EXISTS `MV_CREATION_METADATA` (
@@ -624,7 +624,7 @@ TBLPROPERTIES (
   \"DB_NAME\",
   \"TBL_NAME\",
   \"TXN_LIST\"
-FROM MV_CREATION_METADATA"
+FROM \"MV_CREATION_METADATA\""
 );
 
 CREATE TABLE IF NOT EXISTS `MV_TABLES_USED` (
@@ -638,7 +638,7 @@ TBLPROPERTIES (
 "SELECT
   \"MV_CREATION_METADATA_ID\",
   \"TBL_ID\"
-FROM MV_TABLES_USED"
+FROM \"MV_TABLES_USED\""
 );
 
 CREATE TABLE IF NOT EXISTS `TBL_COL_PRIVS` (
@@ -964,16 +964,16 @@ TBLPROPERTIES (
 "hive.sql.database.type" = "METASTORE",
 "hive.sql.query" =
 "SELECT
-  r.NAME RP_NAME,
-  t.NAME NAME,
-  TRIGGER_EXPRESSION,
-  ACTION_EXPRESSION
+  r.\"NAME\" AS RP_NAME,
+  t.\"NAME\" AS NAME,
+  \"TRIGGER_EXPRESSION\",
+  \"ACTION_EXPRESSION\"
 FROM
-  WM_TRIGGER t
+  \"WM_TRIGGER\" t
 JOIN
-  WM_RESOURCEPLAN r
+  \"WM_RESOURCEPLAN\" r
 ON
-  t.RP_ID = r.RP_ID"
+  t.\"RP_ID\" = r.\"RP_ID\""
 );
 
 CREATE TABLE IF NOT EXISTS `WM_POOLS` (
@@ -988,17 +988,17 @@ TBLPROPERTIES (
 "hive.sql.database.type" = "METASTORE",
 "hive.sql.query" =
 "SELECT
-  WM_RESOURCEPLAN.NAME,
-  WM_POOL.PATH,
-  WM_POOL.ALLOC_FRACTION,
-  WM_POOL.QUERY_PARALLELISM,
-  WM_POOL.SCHEDULING_POLICY
+  \"WM_RESOURCEPLAN\".\"NAME\",
+  \"WM_POOL\".\"PATH\",
+  \"WM_POOL\".\"ALLOC_FRACTION\",
+  \"WM_POOL\".\"QUERY_PARALLELISM\",
+  \"WM_POOL\".\"SCHEDULING_POLICY\"
 FROM
-  WM_POOL
+  \"WM_POOL\"
 JOIN
-  WM_RESOURCEPLAN
+  \"WM_RESOURCEPLAN\"
 ON
-  WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID"
+  \"WM_POOL\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\""
 );
 
 CREATE TABLE IF NOT EXISTS `WM_POOLS_TO_TRIGGERS` (
@@ -1011,21 +1011,21 @@ TBLPROPERTIES (
 "hive.sql.database.type" = "METASTORE",
 "hive.sql.query" =
 "SELECT
-  WM_RESOURCEPLAN.NAME RP_NAME,
-  WM_POOL.PATH POOL_PATH,
-  WM_TRIGGER.NAME TRIGGER_NAME
-FROM WM_POOL_TO_TRIGGER
-  JOIN WM_POOL ON WM_POOL_TO_TRIGGER.POOL_ID = WM_POOL.POOL_ID
-  JOIN WM_TRIGGER ON WM_POOL_TO_TRIGGER.TRIGGER_ID = WM_TRIGGER.TRIGGER_ID
-  JOIN WM_RESOURCEPLAN ON WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID
+  \"WM_RESOURCEPLAN\".\"NAME\" AS RP_NAME,
+  \"WM_POOL\".\"PATH\" AS POOL_PATH,
+  \"WM_TRIGGER\".\"NAME\" AS TRIGGER_NAME
+FROM \"WM_POOL_TO_TRIGGER\"
+  JOIN \"WM_POOL\" ON \"WM_POOL_TO_TRIGGER\".\"POOL_ID\" = \"WM_POOL\".\"POOL_ID\"
+  JOIN \"WM_TRIGGER\" ON \"WM_POOL_TO_TRIGGER\".\"TRIGGER_ID\" = \"WM_TRIGGER\".\"TRIGGER_ID\"
+  JOIN \"WM_RESOURCEPLAN\" ON \"WM_POOL\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\"
 UNION
 SELECT
-  WM_RESOURCEPLAN.NAME RP_NAME,
-  '<unmanaged queries>' POOL_PATH,
-  WM_TRIGGER.NAME TRIGGER_NAME
-FROM WM_TRIGGER
-  JOIN WM_RESOURCEPLAN ON WM_TRIGGER.RP_ID = WM_RESOURCEPLAN.RP_ID
-WHERE WM_TRIGGER.IS_IN_UNMANAGED = 1
+  \"WM_RESOURCEPLAN\".\"NAME\" AS RP_NAME,
+  '<unmanaged queries>' AS POOL_PATH,
+  \"WM_TRIGGER\".\"NAME\" AS TRIGGER_NAME
+FROM \"WM_TRIGGER\"
+  JOIN \"WM_RESOURCEPLAN\" ON \"WM_TRIGGER\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\"
+WHERE CAST(\"WM_TRIGGER\".\"IS_IN_UNMANAGED\" AS CHAR) IN ('1', 't')
 "
 );
 
@@ -1041,14 +1041,14 @@ TBLPROPERTIES (
 "hive.sql.database.type" = "METASTORE",
 "hive.sql.query" =
 "SELECT
-  WM_RESOURCEPLAN.NAME,
-  ENTITY_TYPE,
-  ENTITY_NAME,
-  case when WM_POOL.PATH is null then '<unmanaged>' else WM_POOL.PATH end,
-  ORDERING
-FROM WM_MAPPING
-JOIN WM_RESOURCEPLAN ON WM_MAPPING.RP_ID = WM_RESOURCEPLAN.RP_ID
-LEFT OUTER JOIN WM_POOL ON WM_POOL.POOL_ID = WM_MAPPING.POOL_ID
+  \"WM_RESOURCEPLAN\".\"NAME\",
+  \"ENTITY_TYPE\",
+  \"ENTITY_NAME\",
+  case when \"WM_POOL\".\"PATH\" is null then '<unmanaged>' else \"WM_POOL\".\"PATH\" end,
+  \"ORDERING\"
+FROM \"WM_MAPPING\"
+JOIN \"WM_RESOURCEPLAN\" ON \"WM_MAPPING\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\"
+LEFT OUTER JOIN \"WM_POOL\" ON \"WM_POOL\".\"POOL_ID\" = \"WM_MAPPING\".\"POOL_ID\"
 "
 );
 
@@ -1067,16 +1067,22 @@ CREATE VIEW IF NOT EXISTS `SCHEMATA`
   `DEFAULT_CHARACTER_SET_NAME`,
   `SQL_PATH`
 ) AS
-SELECT
+SELECT DISTINCT
   'default',
-  `NAME`,
-  `OWNER_NAME`,
+  D.`NAME`,
+  D.`OWNER_NAME`,
   cast(null as string),
   cast(null as string),
   cast(null as string),
   `DB_LOCATION_URI`
 FROM
-  sys.DBS;
+  `sys`.`DBS` D, `sys`.`TBLS` T, `sys`.`TBL_PRIVS` P
+WHERE
+  NOT restrict_information_schema() OR
+  D.`DB_ID` = T.`DB_ID`
+  AND T.`TBL_ID` = P.`TBL_ID`
+  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'));
 
 CREATE VIEW IF NOT EXISTS `TABLES`
 (
@@ -1093,7 +1099,7 @@ CREATE VIEW IF NOT EXISTS `TABLES`
   `IS_TYPED`,
   `COMMIT_ACTION`
 ) AS
-SELECT
+SELECT DISTINCT
   'default',
   D.NAME,
   T.TBL_NAME,
@@ -1107,9 +1113,13 @@ SELECT
   'NO',
   cast(null as string)
 FROM
-  `sys`.`TBLS` T, `sys`.`DBS` D
+  `sys`.`TBLS` T, `sys`.`DBS` D, `sys`.`TBL_PRIVS` P
 WHERE
-  D.`DB_ID` = T.`DB_ID`;
+  D.`DB_ID` = T.`DB_ID`
+  AND (NOT restrict_information_schema() OR T.`TBL_ID` = P.`TBL_ID`
+  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
+  AND P.`TBL_PRIV`='SELECT');
 
 CREATE VIEW IF NOT EXISTS `TABLE_PRIVILEGES`
 (
@@ -1122,9 +1132,9 @@ CREATE VIEW IF NOT EXISTS `TABLE_PRIVILEGES`
   `IS_GRANTABLE`,
   `WITH_HIERARCHY`
 ) AS
-SELECT
-  `GRANTOR`,
-  `PRINCIPAL_NAME`,
+SELECT DISTINCT
+  P.`GRANTOR`,
+  P.`PRINCIPAL_NAME`,
   'default',
   D.`NAME`,
   T.`TBL_NAME`,
@@ -1132,12 +1142,18 @@ SELECT
   IF (P.`GRANT_OPTION` == 0, 'NO', 'YES'),
   'NO'
 FROM
-  sys.`TBL_PRIVS` P,
-  sys.`TBLS` T,
-  sys.`DBS` D
+  `sys`.`TBL_PRIVS` P,
+  `sys`.`TBLS` T,
+  `sys`.`DBS` D,
+  `sys`.`TBL_PRIVS` P2
 WHERE
-  P.TBL_ID = T.TBL_ID
-  AND T.DB_ID = D.DB_ID;
+  P.`TBL_ID` = T.`TBL_ID`
+  AND T.`DB_ID` = D.`DB_ID`
+  AND (NOT restrict_information_schema() OR
+  P.`TBL_ID` = P2.`TBL_ID` AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE`
+  AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP'))
+  AND P2.`TBL_PRIV`='SELECT');
 
 CREATE VIEW IF NOT EXISTS `COLUMNS`
 (
@@ -1189,7 +1205,7 @@ CREATE VIEW IF NOT EXISTS `COLUMNS`
   `DECLARED_NUMERIC_PRECISION`,
   `DECLARED_NUMERIC_SCALE`
 ) AS
-SELECT
+SELECT DISTINCT
   'default',
   D.NAME,
   T.TBL_NAME,
@@ -1278,14 +1294,21 @@ SELECT
        WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
        ELSE null END
 FROM
-  sys.`COLUMNS_V2` C,
-  sys.`SDS` S,
-  sys.`TBLS` T,
-  sys.`DBS` D
+  `sys`.`COLUMNS_V2` C,
+  `sys`.`SDS` S,
+  `sys`.`TBLS` T,
+  `sys`.`DBS` D,
+  `sys`.`TBL_COL_PRIVS` P
 WHERE
   S.`SD_ID` = T.`SD_ID`
   AND T.`DB_ID` = D.`DB_ID`
-  AND C.`CD_ID` = S.`CD_ID`;
+  AND C.`CD_ID` = S.`CD_ID`
+  AND (NOT restrict_information_schema() OR
+  T.`TBL_ID` = P.`TBL_ID`
+  AND C.`COLUMN_NAME` = P.`COLUMN_NAME`
+  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
+  AND P.`TBL_COL_PRIV`='SELECT');
 
 CREATE VIEW IF NOT EXISTS `COLUMN_PRIVILEGES`
 (
@@ -1298,27 +1321,30 @@ CREATE VIEW IF NOT EXISTS `COLUMN_PRIVILEGES`
   `PRIVILEGE_TYPE`,
   `IS_GRANTABLE`
 ) AS
-SELECT
-  `GRANTOR`,
-  `PRINCIPAL_NAME`,
+SELECT DISTINCT
+  P.`GRANTOR`,
+  P.`PRINCIPAL_NAME`,
   'default',
   D.`NAME`,
   T.`TBL_NAME`,
-  C.`COLUMN_NAME`,
+  P.`COLUMN_NAME`,
   P.`TBL_COL_PRIV`,
   IF (P.`GRANT_OPTION` == 0, 'NO', 'YES')
 FROM
-  sys.`TBL_COL_PRIVS` P,
-  sys.`TBLS` T,
-  sys.`DBS` D,
-  sys.`COLUMNS_V2` C,
-  sys.`SDS` S
+  `sys`.`TBL_COL_PRIVS` P,
+  `sys`.`TBLS` T,
+  `sys`.`DBS` D,
+  `sys`.`SDS` S,
+  `sys`.`TBL_PRIVS` P2
 WHERE
   S.`SD_ID` = T.`SD_ID`
   AND T.`DB_ID` = D.`DB_ID`
   AND P.`TBL_ID` = T.`TBL_ID`
-  AND P.`COLUMN_NAME` = C.`COLUMN_NAME`
-  AND C.`CD_ID` = S.`CD_ID`;
+  AND (NOT restrict_information_schema() OR
+  P.`TBL_ID` = P2.`TBL_ID` AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE`
+  AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP'))
+  AND P2.`TBL_PRIV`='SELECT');
 
 CREATE VIEW IF NOT EXISTS `VIEWS`
 (
@@ -1333,7 +1359,7 @@ CREATE VIEW IF NOT EXISTS `VIEWS`
   `IS_TRIGGER_DELETABLE`,
   `IS_TRIGGER_INSERTABLE_INTO`
 ) AS
-SELECT
+SELECT DISTINCT
   'default',
   D.NAME,
   T.TBL_NAME,
@@ -1346,7 +1372,13 @@ SELECT
   false
 FROM
   `sys`.`DBS` D,
-  `sys`.`TBLS` T
+  `sys`.`TBLS` T,
+  `sys`.`TBL_PRIVS` P
 WHERE
-   D.`DB_ID` = T.`DB_ID` AND
-   length(T.VIEW_ORIGINAL_TEXT) > 0;
+  D.`DB_ID` = T.`DB_ID`
+  AND length(T.VIEW_ORIGINAL_TEXT) > 0
+  AND (NOT restrict_information_schema() OR
+  T.`TBL_ID` = P.`TBL_ID`
+  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
+  AND P.`TBL_PRIV`='SELECT');

http://git-wip-us.apache.org/repos/asf/hive/blob/61ec445c/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
index bb91eea..4611ce9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/FunctionRegistry.java
@@ -356,7 +356,9 @@ public final class FunctionRegistry {
     system.registerGenericUDF("current_date", GenericUDFCurrentDate.class);
     system.registerGenericUDF("current_timestamp", GenericUDFCurrentTimestamp.class);
     system.registerGenericUDF("current_user", GenericUDFCurrentUser.class);
+    system.registerGenericUDF("current_groups", GenericUDFCurrentGroups.class);
     system.registerGenericUDF("logged_in_user", GenericUDFLoggedInUser.class);
+    system.registerGenericUDF("restrict_information_schema", GenericUDFRestrictInformationSchema.class);
 
     system.registerGenericUDF("isnull", GenericUDFOPNull.class);
     system.registerGenericUDF("isnotnull", GenericUDFOPNotNull.class);

http://git-wip-us.apache.org/repos/asf/hive/blob/61ec445c/ql/src/java/org/apache/hadoop/hive/ql/metadata/JarUtils.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/JarUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/JarUtils.java
new file mode 100644
index 0000000..113a6e3
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/JarUtils.java
@@ -0,0 +1,344 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with this
+ * work for additional information regarding copyright ownership. The ASF
+ * licenses this file to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations under
+ * the License.
+ */
+package org.apache.hadoop.hive.ql.metadata;
+
+import java.io.BufferedOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.net.URL;
+import java.net.URLDecoder;
+import java.text.MessageFormat;
+import java.util.Enumeration;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.jar.JarFile;
+import java.util.jar.JarOutputStream;
+import java.util.jar.Manifest;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipFile;
+import java.util.zip.ZipOutputStream;
+
+import org.apache.commons.io.IOUtils;
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hive.common.JavaUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * Utilities to ship containing jars to the backend. Borrowed from
+ * Hbase TableMapReduceUtil.addDependencyJars
+ */
+public class JarUtils {
+  private static final Logger log = LoggerFactory.getLogger(JarUtils.class);
+
+  public static void addDependencyJars(Configuration conf, List<Class<?>> classes) throws IOException {
+    FileSystem localFs = FileSystem.getLocal(conf);
+    Set<String> jars = new HashSet<String>();
+    // Add jars that are already in the tmpjars variable
+    jars.addAll(conf.getStringCollection("tmpjars"));
+
+    // add jars as we find them to a map of contents jar name so that we can
+    // avoid
+    // creating new jars for classes that have already been packaged.
+    Map<String,String> packagedClasses = new HashMap<String,String>();
+
+    // Add jars containing the specified classes
+    for (Class<?> clazz : classes) {
+      if (clazz == null) {
+        continue;
+      }
+      Path path = findOrCreateJar(clazz, localFs, packagedClasses);
+      if (path == null) {
+        log.warn("Could not find jar for class " + clazz + " in order to ship it to the cluster.");
+        continue;
+      }
+      if (!localFs.exists(path)) {
+        log.warn("Could not validate jar file " + path + " for class " + clazz);
+        continue;
+      }
+      jars.add(path.toString());
+    }
+    if (!jars.isEmpty()) {
+      conf.set("tmpjars", StringUtils.join(jars, ","));
+    }
+  }
+
+  /**
+   * If org.apache.hadoop.util.JarFinder is available (0.23+ hadoop), finds the Jar for a class or
+   * creates it if it doesn't exist. If the class is in a directory in the classpath, it creates a
+   * Jar on the fly with the contents of the directory and returns the path to that Jar. If a Jar is
+   * created, it is created in the system temporary directory. Otherwise, returns an existing jar
+   * that contains a class of the same name. Maintains a mapping from jar contents to the tmp jar
+   * created.
+   *
+   * @param my_class
+   *          the class to find.
+   * @param fs
+   *          the FileSystem with which to qualify the returned path.
+   * @param packagedClasses
+   *          a map of class name to path.
+   * @return a jar file that contains the class.
+   * @throws IOException
+   */
+  @SuppressWarnings("deprecation")
+  private static Path findOrCreateJar(Class<?> my_class, FileSystem fs,
+      Map<String,String> packagedClasses) throws IOException {
+    // attempt to locate an existing jar for the class.
+    String jar = findContainingJar(my_class, packagedClasses);
+    if (StringUtils.isEmpty(jar)) {
+      jar = getJar(my_class);
+      updateMap(jar, packagedClasses);
+    }
+
+    if (StringUtils.isEmpty(jar)) {
+      return null;
+    }
+
+    log.debug("For class {}, using jar {}", my_class.getName(), jar);
+    return new Path(jar).makeQualified(fs);
+  }
+
+  /**
+   * Add entries to <code>packagedClasses</code> corresponding to class files contained in
+   * <code>jar</code>.
+   *
+   * @param jar
+   *          The jar who's content to list.
+   * @param packagedClasses
+   *          map[class -> jar]
+   */
+  private static void updateMap(String jar, Map<String,String> packagedClasses) throws IOException {
+    if (StringUtils.isEmpty(jar)) {
+      return;
+    }
+    try (ZipFile zip = new ZipFile(jar)){
+      for (Enumeration<? extends ZipEntry> iter = zip.entries(); iter.hasMoreElements();) {
+        ZipEntry entry = iter.nextElement();
+        if (entry.getName().endsWith("class")) {
+          packagedClasses.put(entry.getName(), jar);
+        }
+      }
+    }
+  }
+
+  /**
+   * Find a jar that contains a class of the same name, if any. It will return a jar file, even if
+   * that is not the first thing on the class path that has a class with the same name. Looks first
+   * on the classpath and then in the <code>packagedClasses</code> map.
+   *
+   * @param my_class
+   *          the class to find.
+   * @return a jar file that contains the class, or null.
+   * @throws IOException
+   */
+  private static String findContainingJar(Class<?> my_class, Map<String,String> packagedClasses)
+      throws IOException {
+    ClassLoader loader = my_class.getClassLoader();
+    String class_file = my_class.getName().replaceAll("\\.", "/") + ".class";
+
+    // first search the classpath
+    for (Enumeration<URL> itr = loader.getResources(class_file); itr.hasMoreElements();) {
+      URL url = itr.nextElement();
+      if ("jar".equals(url.getProtocol())) {
+        String toReturn = url.getPath();
+        if (toReturn.startsWith("file:")) {
+          toReturn = toReturn.substring("file:".length());
+        }
+        // URLDecoder is a misnamed class, since it actually decodes
+        // x-www-form-urlencoded MIME type rather than actual
+        // URL encoding (which the file path has). Therefore it would
+        // decode +s to ' 's which is incorrect (spaces are actually
+        // either unencoded or encoded as "%20"). Replace +s first, so
+        // that they are kept sacred during the decoding process.
+        toReturn = toReturn.replaceAll("\\+", "%2B");
+        toReturn = URLDecoder.decode(toReturn, "UTF-8");
+        return toReturn.replaceAll("!.*$", "");
+      }
+    }
+
+    // now look in any jars we've packaged using JarFinder. Returns null
+    // when
+    // no jar is found.
+    return packagedClasses.get(class_file);
+  }
+
+  /**
+   * Invoke 'getJar' on a JarFinder implementation. Useful for some job configuration contexts
+   * (HBASE-8140) and also for testing on MRv2. First check if we have HADOOP-9426. Lacking that,
+   * fall back to the backport.
+   *
+   * @param my_class
+   *          the class to find.
+   * @return a jar file that contains the class, or null.
+   */
+  private static String getJar(Class<?> my_class) {
+    String ret = null;
+    String hadoopJarFinder = "org.apache.hadoop.util.JarFinder";
+    Class<?> jarFinder = null;
+    try {
+      log.debug("Looking for: {}", hadoopJarFinder);
+      jarFinder = JavaUtils.loadClass(hadoopJarFinder);
+      log.debug("Found: {}", hadoopJarFinder);
+      Method getJar = jarFinder.getMethod("getJar", Class.class);
+      ret = (String) getJar.invoke(null, my_class);
+    } catch (ClassNotFoundException e) {
+      log.debug("Using backported JarFinder.");
+      ret = jarFinderGetJar(my_class);
+    } catch (InvocationTargetException e) {
+      // function was properly called, but threw it's own exception.
+      // Unwrap it
+      // and pass it on.
+      throw new RuntimeException(e.getCause());
+    } catch (Exception e) {
+      // toss all other exceptions, related to reflection failure
+      throw new RuntimeException("getJar invocation failed.", e);
+    }
+
+    return ret;
+  }
+
+  /**
+   * Returns the full path to the Jar containing the class. It always return a JAR.
+   *
+   * @param klass
+   *          class.
+   *
+   * @return path to the Jar containing the class.
+   */
+  @SuppressWarnings("rawtypes")
+  public static String jarFinderGetJar(Class klass) {
+    Preconditions.checkNotNull(klass, "klass");
+    ClassLoader loader = klass.getClassLoader();
+    if (loader != null) {
+      String class_file = klass.getName().replaceAll("\\.", "/") + ".class";
+      try {
+        for (Enumeration itr = loader.getResources(class_file); itr.hasMoreElements();) {
+          URL url = (URL) itr.nextElement();
+          String path = url.getPath();
+          if (path.startsWith("file:")) {
+            path = path.substring("file:".length());
+          }
+          path = URLDecoder.decode(path, "UTF-8");
+          if ("jar".equals(url.getProtocol())) {
+            path = URLDecoder.decode(path, "UTF-8");
+            return path.replaceAll("!.*$", "");
+          } else if ("file".equals(url.getProtocol())) {
+            String klassName = klass.getName();
+            klassName = klassName.replace(".", "/") + ".class";
+            path = path.substring(0, path.length() - klassName.length());
+            File baseDir = new File(path);
+            File testDir = new File(System.getProperty("test.build.dir", "target/test-dir"));
+            testDir = testDir.getAbsoluteFile();
+            if (!testDir.exists()) {
+              testDir.mkdirs();
+            }
+            File tempJar = File.createTempFile("hadoop-", "", testDir);
+            tempJar = new File(tempJar.getAbsolutePath() + ".jar");
+            createJar(baseDir, tempJar);
+            return tempJar.getAbsolutePath();
+          }
+        }
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    }
+    return null;
+  }
+
+  private static void copyToZipStream(InputStream is, ZipEntry entry, ZipOutputStream zos)
+      throws IOException {
+    zos.putNextEntry(entry);
+    IOUtils.copy(is, zos);
+    is.close();
+    zos.closeEntry();
+  }
+
+  public static void jarDir(File dir, String relativePath, ZipOutputStream zos) throws IOException {
+    Preconditions.checkNotNull(relativePath, "relativePath");
+    Preconditions.checkNotNull(zos, "zos");
+
+    // by JAR spec, if there is a manifest, it must be the first entry in
+    // the
+    // ZIP.
+    File manifestFile = new File(dir, JarFile.MANIFEST_NAME);
+    ZipEntry manifestEntry = new ZipEntry(JarFile.MANIFEST_NAME);
+    if (!manifestFile.exists()) {
+      zos.putNextEntry(manifestEntry);
+      new Manifest().write(new BufferedOutputStream(zos));
+      zos.closeEntry();
+    } else {
+      InputStream is = new FileInputStream(manifestFile);
+      copyToZipStream(is, manifestEntry, zos);
+    }
+    zos.closeEntry();
+    zipDir(dir, relativePath, zos, true);
+    zos.close();
+  }
+
+  private static void zipDir(File dir, String relativePath, ZipOutputStream zos, boolean start)
+      throws IOException {
+    String[] dirList = dir.list();
+    for (String aDirList : dirList) {
+      File f = new File(dir, aDirList);
+      if (!f.isHidden()) {
+        if (f.isDirectory()) {
+          if (!start) {
+            ZipEntry dirEntry = new ZipEntry(relativePath + f.getName() + "/");
+            zos.putNextEntry(dirEntry);
+            zos.closeEntry();
+          }
+          String filePath = f.getPath();
+          File file = new File(filePath);
+          zipDir(file, relativePath + f.getName() + "/", zos, false);
+        } else {
+          String path = relativePath + f.getName();
+          if (!path.equals(JarFile.MANIFEST_NAME)) {
+            ZipEntry anEntry = new ZipEntry(path);
+            InputStream is = new FileInputStream(f);
+            copyToZipStream(is, anEntry, zos);
+          }
+        }
+      }
+    }
+  }
+
+  private static void createJar(File dir, File jarFile) throws IOException {
+    Preconditions.checkNotNull(dir, "dir");
+    Preconditions.checkNotNull(jarFile, "jarFile");
+    File jarDir = jarFile.getParentFile();
+    if (!jarDir.exists()) {
+      if (!jarDir.mkdirs()) {
+        throw new IOException(MessageFormat.format("could not create dir [{0}]", jarDir));
+      }
+    }
+    JarOutputStream zos = new JarOutputStream(new FileOutputStream(jarFile));
+    jarDir(dir, StringUtils.EMPTY, zos);
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/61ec445c/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java
index 60d9dc1..0e644b4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/HiveAuthorizationProvider.java
@@ -28,6 +28,8 @@ import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.Partition;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.ql.security.HiveAuthenticationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider;
 
 /**
  * Hive's pluggable authorization provider interface
@@ -124,4 +126,11 @@ public interface HiveAuthorizationProvider extends Configurable{
       Privilege[] readRequiredPriv, Privilege[] writeRequiredPriv)
       throws HiveException, AuthorizationException;
 
+  /**
+   * @return HivePolicyProvider instance (expected to be a singleton)
+   * @throws HiveAuthzPluginException
+   */
+  default HivePolicyProvider getHivePolicyProvider() throws HiveAuthzPluginException {
+    return null;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/61ec445c/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchonizer.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchonizer.java b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchonizer.java
new file mode 100644
index 0000000..9b2e6cd
--- /dev/null
+++ b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchonizer.java
@@ -0,0 +1,204 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization;
+
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.curator.framework.recipes.leader.LeaderLatch;
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
+import org.apache.hadoop.hive.metastore.IMetaStoreClient;
+import org.apache.hadoop.hive.metastore.api.FieldSchema;
+import org.apache.hadoop.hive.metastore.api.HiveObjectPrivilege;
+import org.apache.hadoop.hive.metastore.api.HiveObjectRef;
+import org.apache.hadoop.hive.metastore.api.HiveObjectType;
+import org.apache.hadoop.hive.metastore.api.PrincipalType;
+import org.apache.hadoop.hive.metastore.api.PrivilegeBag;
+import org.apache.hadoop.hive.metastore.api.PrivilegeGrantInfo;
+import org.apache.hadoop.hive.metastore.api.Table;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveMetastoreClientFactoryImpl;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePolicyProvider;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HivePrivilegeObject.HivePrivilegeObjectType;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveResourceACLs;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * PrivilegeSynchonizer defines a thread to synchronize privileges from
+ * external authorizer to Hive metastore.
+ */
+public class PrivilegeSynchonizer implements Runnable {
+
+  private static final Logger LOG = 
LoggerFactory.getLogger(PrivilegeSynchonizer.class);
+  public static final String GRANTOR = "ranger";
+  private IMetaStoreClient hiveClient;
+  private LeaderLatch privilegeSynchonizerLatch;
+  private HiveConf hiveConf;
+  private HiveAuthorizer authorizer;
+
+  public PrivilegeSynchonizer(LeaderLatch privilegeSynchonizerLatch, 
HiveAuthorizer authorizer, HiveConf hiveConf) {
+    try {
+      hiveClient = new 
HiveMetastoreClientFactoryImpl().getHiveMetastoreClient();
+    } catch (HiveAuthzPluginException e) {
+      throw new RuntimeException("Error creating getHiveMetastoreClient", e);
+    }
+    this.privilegeSynchonizerLatch = privilegeSynchonizerLatch;
+    this.authorizer = authorizer;
+    this.hiveConf = hiveConf;
+  }
+
+  private void addACLsToBag(
+      Map<String, Map<HiveResourceACLs.Privilege, 
HiveResourceACLs.AccessResult>> principalAclsMap,
+      PrivilegeBag privBag, HiveObjectType objectType, String dbName, String 
tblName, String columnName,
+      PrincipalType principalType) {
+
+    for (Map.Entry<String, Map<HiveResourceACLs.Privilege, 
HiveResourceACLs.AccessResult>> principalAcls
+        : principalAclsMap.entrySet()) {
+      String principal = principalAcls.getKey();
+      for (Map.Entry<HiveResourceACLs.Privilege, 
HiveResourceACLs.AccessResult> acl : principalAcls.getValue()
+          .entrySet()) {
+        if (acl.getValue() == HiveResourceACLs.AccessResult.ALLOWED) {
+          switch (objectType) {
+          case DATABASE:
+            privBag.addToPrivileges(
+                new HiveObjectPrivilege(new 
HiveObjectRef(HiveObjectType.DATABASE, dbName, null, null, null), principal,
+                    principalType, new 
PrivilegeGrantInfo(acl.getKey().toString(),
+                        (int) (System.currentTimeMillis() / 1000), GRANTOR, 
PrincipalType.USER, false)));
+            break;
+          case TABLE:
+            privBag.addToPrivileges(
+                new HiveObjectPrivilege(new 
HiveObjectRef(HiveObjectType.TABLE, dbName, tblName, null, null), principal,
+                    principalType, new 
PrivilegeGrantInfo(acl.getKey().toString(),
+                        (int) (System.currentTimeMillis() / 1000), GRANTOR, 
PrincipalType.USER, false)));
+            break;
+          case COLUMN:
+            privBag.addToPrivileges(
+                new HiveObjectPrivilege(new 
HiveObjectRef(HiveObjectType.COLUMN, dbName, tblName, null, columnName),
+                    principal, principalType, new 
PrivilegeGrantInfo(acl.getKey().toString(),
+                        (int) (System.currentTimeMillis() / 1000), GRANTOR, 
PrincipalType.USER, false)));
+            break;
+          default:
+            throw new RuntimeException("Get unknown object type " + 
objectType);
+          }
+        }
+      }
+    }
+  }
+
+  private HiveObjectRef getObjToRefresh(HiveObjectType type, String dbName, 
String tblName) throws Exception {
+    HiveObjectRef objToRefresh = null;
+    switch (type) {
+    case DATABASE:
+      objToRefresh = new HiveObjectRef(HiveObjectType.DATABASE, dbName, null, 
null, null);
+      break;
+    case TABLE:
+      objToRefresh = new HiveObjectRef(HiveObjectType.TABLE, dbName, tblName, 
null, null);
+      break;
+    case COLUMN:
+      objToRefresh = new HiveObjectRef(HiveObjectType.COLUMN, dbName, tblName, 
null, null);
+      break;
+    default:
+      throw new RuntimeException("Get unknown object type " + type);
+    }
+    return objToRefresh;
+  }
+
+  private void addGrantPrivilegesToBag(HivePolicyProvider policyProvider, 
PrivilegeBag privBag, HiveObjectType type,
+      String dbName, String tblName, String columnName) throws Exception {
+
+    HiveResourceACLs objectAcls = null;
+
+    switch (type) {
+    case DATABASE:
+      objectAcls = policyProvider
+          .getResourceACLs(new 
HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, dbName, null));
+      break;
+
+    case TABLE:
+      objectAcls = policyProvider
+          .getResourceACLs(new 
HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, dbName, tblName));
+      break;
+
+    case COLUMN:
+      objectAcls = policyProvider
+          .getResourceACLs(new 
HivePrivilegeObject(HivePrivilegeObjectType.COLUMN, dbName, tblName, null, 
columnName));
+      break;
+
+    default:
+      throw new RuntimeException("Get unknown object type " + type);
+    }
+
+    if (objectAcls == null) {
+      return;
+    }
+
+    addACLsToBag(objectAcls.getUserPermissions(), privBag, type, dbName, 
tblName, columnName, PrincipalType.USER);
+    addACLsToBag(objectAcls.getGroupPermissions(), privBag, type, dbName, 
tblName, columnName, PrincipalType.GROUP);
+  }
+
+  @Override
+  public void run() {
+    while (true) {
+      try {
+        HivePolicyProvider policyProvider = authorizer.getHivePolicyProvider();
+        long interval = HiveConf.getTimeVar(hiveConf, 
ConfVars.HIVE_PRIVILEGE_SYNCHRONIZER_INTERVAL, TimeUnit.SECONDS);
+        if (hiveConf.getBoolVar(ConfVars.HIVE_PRIVILEGE_SYNCHRONIZER)) {
+          if (!privilegeSynchonizerLatch.await(interval, TimeUnit.SECONDS)) {
+            continue;
+          }
+          LOG.debug("Start synchonize privilege");
+          for (String dbName : hiveClient.getAllDatabases()) {
+            HiveObjectRef dbToRefresh = 
getObjToRefresh(HiveObjectType.DATABASE, dbName, null);
+            PrivilegeBag grantDatabaseBag = new PrivilegeBag();
+            addGrantPrivilegesToBag(policyProvider, grantDatabaseBag, 
HiveObjectType.DATABASE, dbName, null, null);
+            hiveClient.refresh_privileges(dbToRefresh, grantDatabaseBag);
+
+            for (String tblName : hiveClient.getAllTables(dbName)) {
+              HiveObjectRef tableToRefresh = 
getObjToRefresh(HiveObjectType.TABLE, dbName, tblName);
+              PrivilegeBag grantTableBag = new PrivilegeBag();
+              addGrantPrivilegesToBag(policyProvider, grantTableBag, 
HiveObjectType.TABLE, dbName, tblName, null);
+              hiveClient.refresh_privileges(tableToRefresh, grantTableBag);
+
+              HiveObjectRef tableOfColumnsToRefresh = 
getObjToRefresh(HiveObjectType.COLUMN, dbName, tblName);
+              PrivilegeBag grantColumnBag = new PrivilegeBag();
+              Table tbl = hiveClient.getTable(dbName, tblName);
+              for (FieldSchema fs : tbl.getPartitionKeys()) {
+                addGrantPrivilegesToBag(policyProvider, grantColumnBag, 
HiveObjectType.COLUMN, dbName, tblName,
+                    fs.getName());
+              }
+              for (FieldSchema fs : tbl.getSd().getCols()) {
+                addGrantPrivilegesToBag(policyProvider, grantColumnBag, 
HiveObjectType.COLUMN, dbName, tblName,
+                    fs.getName());
+              }
+              hiveClient.refresh_privileges(tableOfColumnsToRefresh, 
grantColumnBag);
+            }
+          }
+        }
+        // Wait if no exception happens, otherwise, retry immediately
+        Thread.sleep(interval * 1000);
+        LOG.debug("Success synchonize privilege");
+      } catch (Exception e) {
+        LOG.error("Error initializing PrivilegeSynchonizer: " + 
e.getMessage(), e);
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/61ec445c/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveResourceACLsImpl.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveResourceACLsImpl.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveResourceACLsImpl.java
new file mode 100644
index 0000000..6804d16
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/security/authorization/plugin/HiveResourceACLsImpl.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hive.ql.security.authorization.plugin;
+
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * Default implementation of {@link HiveResourceACLs}.
+ */
+public class HiveResourceACLsImpl implements HiveResourceACLs {
+
+  Map<String, Map<Privilege, AccessResult>> userPermissions = new 
HashMap<String, Map<Privilege, AccessResult>>();
+  Map<String, Map<Privilege, AccessResult>> groupPermissions = new 
HashMap<String, Map<Privilege, AccessResult>>();
+
+  @Override
+  public Map<String, Map<Privilege, AccessResult>> getUserPermissions() {
+    return userPermissions;
+  }
+
+  @Override
+  public Map<String, Map<Privilege, AccessResult>> getGroupPermissions() {
+    return groupPermissions;
+  }
+
+  public void addUserEntry(String user, Privilege priv, AccessResult result) {
+    if (userPermissions.containsKey(user)) {
+      userPermissions.get(user).put(priv, result);
+    } else {
+      Map<Privilege, AccessResult> entry = new EnumMap<Privilege, 
AccessResult>(Privilege.class);
+      entry.put(priv, result);
+      userPermissions.put(user, entry);
+    }
+  }
+
+  public void addGroupEntry(String group, Privilege priv, AccessResult result) 
{
+    if (groupPermissions.containsKey(group)) {
+      groupPermissions.get(group).put(priv, result);
+    } else {
+      Map<Privilege, AccessResult> entry = new EnumMap<Privilege, 
AccessResult>(Privilege.class);
+      entry.put(priv, result);
+      groupPermissions.put(group, entry);
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/61ec445c/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java 
b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
index 60b63d4..5198574 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/session/SessionState.java
@@ -1257,6 +1257,13 @@ public class SessionState {
     return null;
   }
 
+  public static List<String> getGroupsFromAuthenticator() {
+    if (SessionState.get() != null && SessionState.get().getAuthenticator() != 
null) {
+      return SessionState.get().getAuthenticator().getGroupNames();
+    }
+    return null;
+  }
+
   static void validateFiles(List<String> newFiles) throws 
IllegalArgumentException {
     SessionState ss = SessionState.get();
     Configuration conf = (ss == null) ? new Configuration() : ss.getConf();

http://git-wip-us.apache.org/repos/asf/hive/blob/61ec445c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentGroups.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentGroups.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentGroups.java
new file mode 100644
index 0000000..14176fd
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFCurrentGroups.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.Text;
+
+/**
+ * UDF to get current group from authenticator. This function is not a 
deterministic function,
+ * but a runtime constant. The return value is constant within a query but can 
be different between queries.
+ */
+@UDFType(deterministic = false, runtimeConstant = true)
+@Description(name = "current_group", value = "_FUNC_() - Returns all groups 
the current user belongs to",
+    extended = "SessionState GroupsFromAuthenticator")
+public class GenericUDFCurrentGroups extends GenericUDF {
+  protected List<Text> currentGroups;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws 
UDFArgumentException {
+    if (arguments.length != 0) {
+      throw new UDFArgumentLengthException(
+          "The function CURRENT_GROUPS does not take any arguments, but found 
" + arguments.length);
+    }
+
+    if (currentGroups == null) {
+      List<String> sessGroupsFromAuth = 
SessionState.getGroupsFromAuthenticator();
+      if (sessGroupsFromAuth != null) {
+        currentGroups = new ArrayList<Text>();
+        for (String group : sessGroupsFromAuth) {
+          currentGroups.add(new Text(group));
+        }
+      }
+    }
+
+    return ObjectInspectorFactory.getStandardListObjectInspector(
+        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    return currentGroups;
+  }
+
+  public List<Text> getCurrentGroups() {
+    return currentGroups;
+  }
+
+  public void setCurrentGroups(List<Text> currentGroups) {
+    this.currentGroups = currentGroups;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    return "CURRENT_GROUPS()";
+  }
+
+  @Override
+  public void copyToNewInstance(Object newInstance) throws 
UDFArgumentException {
+    super.copyToNewInstance(newInstance);
+    // Need to preserve currentGroups
+    GenericUDFCurrentGroups other = (GenericUDFCurrentGroups) newInstance;
+    if (this.currentGroups != null) {
+      if (currentGroups != null) {
+        other.currentGroups = new ArrayList<Text>();
+        for (Text group : currentGroups) {
+          other.currentGroups.add(new Text(group));
+        }
+      }
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/61ec445c/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRestrictInformationSchema.java
----------------------------------------------------------------------
diff --git 
a/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRestrictInformationSchema.java
 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRestrictInformationSchema.java
new file mode 100644
index 0000000..3eb0914
--- /dev/null
+++ 
b/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFRestrictInformationSchema.java
@@ -0,0 +1,129 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hive.ql.udf.generic;
+
+import java.util.List;
+
+import org.apache.hadoop.hive.conf.HiveConf;
+import org.apache.hadoop.hive.metastore.conf.MetastoreConf;
+import org.apache.hadoop.hive.ql.exec.Description;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
+import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
+import org.apache.hadoop.hive.ql.metadata.HiveException;
+import org.apache.hadoop.hive.ql.metadata.HiveUtils;
+import 
org.apache.hadoop.hive.ql.security.authorization.HiveMetastoreAuthorizationProvider;
+import org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthorizer;
+import 
org.apache.hadoop.hive.ql.security.authorization.plugin.HiveAuthzPluginException;
+import org.apache.hadoop.hive.ql.session.SessionState;
+import org.apache.hadoop.hive.ql.udf.UDFType;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import 
org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
+import org.apache.hadoop.io.BooleanWritable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * UDF to determine whether to enforce restriction of information schema.
+ * This is intended for internal usage only. This function is not a 
deterministic function,
+ * but a runtime constant. The return value is constant within a query but can 
be different between queries
+ */
+@UDFType(deterministic = false, runtimeConstant = true)
+@Description(name = "restrict_information_schema",
+    value = "_FUNC_() - Returns whether or not to enable information schema 
restriction. " +
+    "Currently it is enabled if either HS2 authorizer or metastore authorizer 
implements policy provider " +
+    "interface.")
+@NDV(maxNdv = 1)
+public class GenericUDFRestrictInformationSchema extends GenericUDF {
+  private static final Logger LOG = 
LoggerFactory.getLogger(GenericUDFRestrictInformationSchema.class.getName());
+  protected BooleanWritable enabled;
+
+  @Override
+  public ObjectInspector initialize(ObjectInspector[] arguments) throws 
UDFArgumentException {
+    if (arguments.length != 0) {
+      throw new UDFArgumentLengthException(
+          "The function RestrictInformationSchema does not take any arguments, 
but found " + arguments.length);
+    }
+
+    if (enabled == null) {
+      boolean enableHS2PolicyProvider = false;
+      boolean enableMetastorePolicyProvider = false;
+
+      HiveConf hiveConf = SessionState.getSessionConf();
+      HiveAuthorizer authorizer = SessionState.get().getAuthorizerV2();
+      try {
+        if (authorizer.getHivePolicyProvider() != null) {
+          enableHS2PolicyProvider = true;
+        }
+      } catch (HiveAuthzPluginException e) {
+        LOG.warn("Error getting HivePolicyProvider", e);
+      }
+
+      if (!enableHS2PolicyProvider) {
+        if (MetastoreConf.getVar(hiveConf, 
MetastoreConf.ConfVars.PRE_EVENT_LISTENERS) != null &&
+            !MetastoreConf.getVar(hiveConf, 
MetastoreConf.ConfVars.PRE_EVENT_LISTENERS).isEmpty() &&
+            HiveConf.getVar(hiveConf, 
HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER) != null) {
+          List<HiveMetastoreAuthorizationProvider> authorizerProviders;
+          try {
+            authorizerProviders = 
HiveUtils.getMetaStoreAuthorizeProviderManagers(
+              hiveConf, HiveConf.ConfVars.HIVE_METASTORE_AUTHORIZATION_MANAGER,
+              SessionState.get().getAuthenticator());
+            for (HiveMetastoreAuthorizationProvider authProvider : 
authorizerProviders) {
+              if (authProvider.getHivePolicyProvider() != null) {
+                enableMetastorePolicyProvider = true;
+                break;
+              }
+            }
+          } catch (HiveAuthzPluginException e) {
+            LOG.warn("Error getting HivePolicyProvider", e);
+          } catch (HiveException e) {
+            LOG.warn("Error instantiating 
hive.security.metastore.authorization.manager", e);
+          }
+        }
+      }
+
+      if (enableHS2PolicyProvider || enableMetastorePolicyProvider) {
+        enabled = new BooleanWritable(true);
+      } else {
+        enabled = new BooleanWritable(false);
+      }
+    }
+
+    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
+  }
+
+  @Override
+  public Object evaluate(DeferredObject[] arguments) throws HiveException {
+    return enabled;
+  }
+
+  @Override
+  public String getDisplayString(String[] children) {
+    return "RESTRICT_INFORMATION_SCHEMA()";
+  }
+
+  @Override
+  public void copyToNewInstance(Object newInstance) throws 
UDFArgumentException {
+    super.copyToNewInstance(newInstance);
+    // Need to preserve enabled flag
+    GenericUDFRestrictInformationSchema other = 
(GenericUDFRestrictInformationSchema) newInstance;
+    if (this.enabled != null) {
+      other.enabled = new BooleanWritable(this.enabled.get());
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/hive/blob/61ec445c/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out 
b/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
index 39c9880..94fadf1 100644
--- a/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
+++ b/ql/src/test/results/clientpositive/llap/jdbc_handler.q.out
@@ -101,7 +101,7 @@ WHERE 100 < "kkey"
 FROM "SIMPLE_DERBY_TABLE"
 WHERE 100 < "kkey"
           Select Operator
-            expressions: kkey (type: int)
+            expressions: kkey (type: bigint)
             outputColumnNames: _col0
             ListSink
 

http://git-wip-us.apache.org/repos/asf/hive/blob/61ec445c/ql/src/test/results/clientpositive/llap/resourceplan.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/llap/resourceplan.q.out 
b/ql/src/test/results/clientpositive/llap/resourceplan.q.out
index 9850276..8e58b16 100644
--- a/ql/src/test/results/clientpositive/llap/resourceplan.q.out
+++ b/ql/src/test/results/clientpositive/llap/resourceplan.q.out
@@ -261,7 +261,7 @@ TBLPROPERTIES (
   \"OWNER_NAME\",
   \"OWNER_TYPE\"
 FROM
-  DBS"
+  \"DBS\""
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: SYS@DBS
@@ -285,7 +285,7 @@ TBLPROPERTIES (
   \"OWNER_NAME\",
   \"OWNER_TYPE\"
 FROM
-  DBS"
+  \"DBS\""
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: SYS@DBS
@@ -1358,7 +1358,7 @@ TBLPROPERTIES (
   \"VIEW_EXPANDED_TEXT\",
   \"VIEW_ORIGINAL_TEXT\",
   \"IS_REWRITE_ENABLED\"
-FROM TBLS"
+FROM \"TBLS\""
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: SYS@TBLS
@@ -1395,7 +1395,7 @@ TBLPROPERTIES (
   \"VIEW_EXPANDED_TEXT\",
   \"VIEW_ORIGINAL_TEXT\",
   \"IS_REWRITE_ENABLED\"
-FROM TBLS"
+FROM \"TBLS\""
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: SYS@TBLS
@@ -1416,7 +1416,7 @@ TBLPROPERTIES (
   \"DB_NAME\",
   \"TBL_NAME\",
   \"TXN_LIST\"
-FROM MV_CREATION_METADATA"
+FROM \"MV_CREATION_METADATA\""
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: SYS@MV_CREATION_METADATA
@@ -1437,7 +1437,7 @@ TBLPROPERTIES (
   \"DB_NAME\",
   \"TBL_NAME\",
   \"TXN_LIST\"
-FROM MV_CREATION_METADATA"
+FROM \"MV_CREATION_METADATA\""
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: SYS@MV_CREATION_METADATA
@@ -1453,7 +1453,7 @@ TBLPROPERTIES (
 "SELECT
   \"MV_CREATION_METADATA_ID\",
   \"TBL_ID\"
-FROM MV_TABLES_USED"
+FROM \"MV_TABLES_USED\""
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: SYS@MV_TABLES_USED
@@ -1469,7 +1469,7 @@ TBLPROPERTIES (
 "SELECT
   \"MV_CREATION_METADATA_ID\",
   \"TBL_ID\"
-FROM MV_TABLES_USED"
+FROM \"MV_TABLES_USED\""
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: SYS@MV_TABLES_USED
@@ -2152,16 +2152,16 @@ TBLPROPERTIES (
 "hive.sql.database.type" = "METASTORE",
 "hive.sql.query" =
 "SELECT
-  r.NAME RP_NAME,
-  t.NAME NAME,
-  TRIGGER_EXPRESSION,
-  ACTION_EXPRESSION
+  r.\"NAME\" AS RP_NAME,
+  t.\"NAME\" AS NAME,
+  \"TRIGGER_EXPRESSION\",
+  \"ACTION_EXPRESSION\"
 FROM
-  WM_TRIGGER t
+  \"WM_TRIGGER\" t
 JOIN
-  WM_RESOURCEPLAN r
+  \"WM_RESOURCEPLAN\" r
 ON
-  t.RP_ID = r.RP_ID"
+  t.\"RP_ID\" = r.\"RP_ID\""
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: SYS@WM_TRIGGERS
@@ -2177,16 +2177,16 @@ TBLPROPERTIES (
 "hive.sql.database.type" = "METASTORE",
 "hive.sql.query" =
 "SELECT
-  r.NAME RP_NAME,
-  t.NAME NAME,
-  TRIGGER_EXPRESSION,
-  ACTION_EXPRESSION
+  r.\"NAME\" AS RP_NAME,
+  t.\"NAME\" AS NAME,
+  \"TRIGGER_EXPRESSION\",
+  \"ACTION_EXPRESSION\"
 FROM
-  WM_TRIGGER t
+  \"WM_TRIGGER\" t
 JOIN
-  WM_RESOURCEPLAN r
+  \"WM_RESOURCEPLAN\" r
 ON
-  t.RP_ID = r.RP_ID"
+  t.\"RP_ID\" = r.\"RP_ID\""
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: SYS@WM_TRIGGERS
@@ -2203,17 +2203,17 @@ TBLPROPERTIES (
 "hive.sql.database.type" = "METASTORE",
 "hive.sql.query" =
 "SELECT
-  WM_RESOURCEPLAN.NAME,
-  WM_POOL.PATH,
-  WM_POOL.ALLOC_FRACTION,
-  WM_POOL.QUERY_PARALLELISM,
-  WM_POOL.SCHEDULING_POLICY
+  \"WM_RESOURCEPLAN\".\"NAME\",
+  \"WM_POOL\".\"PATH\",
+  \"WM_POOL\".\"ALLOC_FRACTION\",
+  \"WM_POOL\".\"QUERY_PARALLELISM\",
+  \"WM_POOL\".\"SCHEDULING_POLICY\"
 FROM
-  WM_POOL
+  \"WM_POOL\"
 JOIN
-  WM_RESOURCEPLAN
+  \"WM_RESOURCEPLAN\"
 ON
-  WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID"
+  \"WM_POOL\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\""
 )
 PREHOOK: type: CREATETABLE
 PREHOOK: Output: SYS@WM_POOLS
@@ -2230,17 +2230,17 @@ TBLPROPERTIES (
 "hive.sql.database.type" = "METASTORE",
 "hive.sql.query" =
 "SELECT
-  WM_RESOURCEPLAN.NAME,
-  WM_POOL.PATH,
-  WM_POOL.ALLOC_FRACTION,
-  WM_POOL.QUERY_PARALLELISM,
-  WM_POOL.SCHEDULING_POLICY
+  \"WM_RESOURCEPLAN\".\"NAME\",
+  \"WM_POOL\".\"PATH\",
+  \"WM_POOL\".\"ALLOC_FRACTION\",
+  \"WM_POOL\".\"QUERY_PARALLELISM\",
+  \"WM_POOL\".\"SCHEDULING_POLICY\"
 FROM
-  WM_POOL
+  \"WM_POOL\"
 JOIN
-  WM_RESOURCEPLAN
+  \"WM_RESOURCEPLAN\"
 ON
-  WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID"
+  \"WM_POOL\".\"RP_ID\" = \"WM_RESOURCEPLAN\".\"RP_ID\""
 )
 POSTHOOK: type: CREATETABLE
 POSTHOOK: Output: SYS@WM_POOLS
@@ -2255,21 +2255,21 @@ TBLPROPERTIES (
 "hive.sql.database.type" = "METASTORE",
 "hive.sql.query" =
 "SELECT
-  WM_RESOURCEPLAN.NAME RP_NAME,
-  WM_POOL.PATH POOL_PATH,
-  WM_TRIGGER.NAME TRIGGER_NAME
-FROM WM_POOL_TO_TRIGGER
-  JOIN WM_POOL ON WM_POOL_TO_TRIGGER.POOL_ID = WM_POOL.POOL_ID
-  JOIN WM_TRIGGER ON WM_POOL_TO_TRIGGER.TRIGGER_ID = WM_TRIGGER.TRIGGER_ID
-  JOIN WM_RESOURCEPLAN ON WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID
+  \"WM_RESOURCEPLAN\".\"NAME\" AS RP_NAME,
+  \"WM_POOL\".\"PATH\" AS POOL_PATH,
+  \"WM_TRIGGER\".\"NAME\" AS TRIGGER_NAME
+FROM \"WM_POOL_TO_TRIGGER\"
+  JOIN \"WM_POOL\" ON \"WM_POOL_TO_TRIGGER\".\"POOL_ID\" = 
\"WM_POOL\".\"POOL_ID\"
+  JOIN \"WM_TRIGGER\" ON \"WM_POOL_TO_TRIGGER\".\"TRIGGER_ID\" = 
\"WM_TRIGGER\".\"TRIGGER_ID\"
+  JOIN \"WM_RESOURCEPLAN\" ON \"WM_POOL\".\"RP_ID\" = 
\"WM_RESOURCEPLAN\".\"RP_ID\"
 UNION
 SELECT
-  WM_RESOURCEPLAN.NAME RP_NAME,
-  '<unmanaged queries>' POOL_PATH,
-  WM_TRIGGER.NAME TRIGGER_NAME
-FROM WM_TRIGGER
-  JOIN WM_RESOURCEPLAN ON WM_TRIGGER.RP_ID = WM_RESOURCEPLAN.RP_ID
-WHERE WM_TRIGGER.IS_IN_UNMANAGED = 1
+  \"WM_RESOURCEPLAN\".\"NAME\" AS RP_NAME,
+  '<unmanaged queries>' AS POOL_PATH,
+  \"WM_TRIGGER\".\"NAME\" AS TRIGGER_NAME
+FROM \"WM_TRIGGER\"
+  JOIN \"WM_RESOURCEPLAN\" ON \"WM_TRIGGER\".\"RP_ID\" = 
\"WM_RESOURCEPLAN\".\"RP_ID\"
+WHERE CAST(\"WM_TRIGGER\".\"IS_IN_UNMANAGED\" AS CHAR) IN ('1', 't')
 "
 )
 PREHOOK: type: CREATETABLE
@@ -2285,21 +2285,21 @@ TBLPROPERTIES (
 "hive.sql.database.type" = "METASTORE",
 "hive.sql.query" =
 "SELECT
-  WM_RESOURCEPLAN.NAME RP_NAME,
-  WM_POOL.PATH POOL_PATH,
-  WM_TRIGGER.NAME TRIGGER_NAME
-FROM WM_POOL_TO_TRIGGER
-  JOIN WM_POOL ON WM_POOL_TO_TRIGGER.POOL_ID = WM_POOL.POOL_ID
-  JOIN WM_TRIGGER ON WM_POOL_TO_TRIGGER.TRIGGER_ID = WM_TRIGGER.TRIGGER_ID
-  JOIN WM_RESOURCEPLAN ON WM_POOL.RP_ID = WM_RESOURCEPLAN.RP_ID
+  \"WM_RESOURCEPLAN\".\"NAME\" AS RP_NAME,
+  \"WM_POOL\".\"PATH\" AS POOL_PATH,
+  \"WM_TRIGGER\".\"NAME\" AS TRIGGER_NAME
+FROM \"WM_POOL_TO_TRIGGER\"
+  JOIN \"WM_POOL\" ON \"WM_POOL_TO_TRIGGER\".\"POOL_ID\" = 
\"WM_POOL\".\"POOL_ID\"
+  JOIN \"WM_TRIGGER\" ON \"WM_POOL_TO_TRIGGER\".\"TRIGGER_ID\" = 
\"WM_TRIGGER\".\"TRIGGER_ID\"
+  JOIN \"WM_RESOURCEPLAN\" ON \"WM_POOL\".\"RP_ID\" = 
\"WM_RESOURCEPLAN\".\"RP_ID\"
 UNION
 SELECT
-  WM_RESOURCEPLAN.NAME RP_NAME,
-  '<unmanaged queries>' POOL_PATH,
-  WM_TRIGGER.NAME TRIGGER_NAME
-FROM WM_TRIGGER
-  JOIN WM_RESOURCEPLAN ON WM_TRIGGER.RP_ID = WM_RESOURCEPLAN.RP_ID
-WHERE WM_TRIGGER.IS_IN_UNMANAGED = 1
+  \"WM_RESOURCEPLAN\".\"NAME\" AS RP_NAME,
+  '<unmanaged queries>' AS POOL_PATH,
+  \"WM_TRIGGER\".\"NAME\" AS TRIGGER_NAME
+FROM \"WM_TRIGGER\"
+  JOIN \"WM_RESOURCEPLAN\" ON \"WM_TRIGGER\".\"RP_ID\" = 
\"WM_RESOURCEPLAN\".\"RP_ID\"
+WHERE CAST(\"WM_TRIGGER\".\"IS_IN_UNMANAGED\" AS CHAR) IN ('1', 't')
 "
 )
 POSTHOOK: type: CREATETABLE
@@ -2317,14 +2317,14 @@ TBLPROPERTIES (
 "hive.sql.database.type" = "METASTORE",
 "hive.sql.query" =
 "SELECT
-  WM_RESOURCEPLAN.NAME,
-  ENTITY_TYPE,
-  ENTITY_NAME,
-  case when WM_POOL.PATH is null then '<unmanaged>' else WM_POOL.PATH end,
-  ORDERING
-FROM WM_MAPPING
-JOIN WM_RESOURCEPLAN ON WM_MAPPING.RP_ID = WM_RESOURCEPLAN.RP_ID
-LEFT OUTER JOIN WM_POOL ON WM_POOL.POOL_ID = WM_MAPPING.POOL_ID
+  \"WM_RESOURCEPLAN\".\"NAME\",
+  \"ENTITY_TYPE\",
+  \"ENTITY_NAME\",
+  case when \"WM_POOL\".\"PATH\" is null then '<unmanaged>' else 
\"WM_POOL\".\"PATH\" end,
+  \"ORDERING\"
+FROM \"WM_MAPPING\"
+JOIN \"WM_RESOURCEPLAN\" ON \"WM_MAPPING\".\"RP_ID\" = 
\"WM_RESOURCEPLAN\".\"RP_ID\"
+LEFT OUTER JOIN \"WM_POOL\" ON \"WM_POOL\".\"POOL_ID\" = 
\"WM_MAPPING\".\"POOL_ID\"
 "
 )
 PREHOOK: type: CREATETABLE
@@ -2342,14 +2342,14 @@ TBLPROPERTIES (
 "hive.sql.database.type" = "METASTORE",
 "hive.sql.query" =
 "SELECT
-  WM_RESOURCEPLAN.NAME,
-  ENTITY_TYPE,
-  ENTITY_NAME,
-  case when WM_POOL.PATH is null then '<unmanaged>' else WM_POOL.PATH end,
-  ORDERING
-FROM WM_MAPPING
-JOIN WM_RESOURCEPLAN ON WM_MAPPING.RP_ID = WM_RESOURCEPLAN.RP_ID
-LEFT OUTER JOIN WM_POOL ON WM_POOL.POOL_ID = WM_MAPPING.POOL_ID
+  \"WM_RESOURCEPLAN\".\"NAME\",
+  \"ENTITY_TYPE\",
+  \"ENTITY_NAME\",
+  case when \"WM_POOL\".\"PATH\" is null then '<unmanaged>' else 
\"WM_POOL\".\"PATH\" end,
+  \"ORDERING\"
+FROM \"WM_MAPPING\"
+JOIN \"WM_RESOURCEPLAN\" ON \"WM_MAPPING\".\"RP_ID\" = 
\"WM_RESOURCEPLAN\".\"RP_ID\"
+LEFT OUTER JOIN \"WM_POOL\" ON \"WM_POOL\".\"POOL_ID\" = 
\"WM_MAPPING\".\"POOL_ID\"
 "
 )
 POSTHOOK: type: CREATETABLE
@@ -2381,18 +2381,26 @@ PREHOOK: query: CREATE VIEW IF NOT EXISTS `SCHEMATA`
   `DEFAULT_CHARACTER_SET_NAME`,
   `SQL_PATH`
 ) AS
-SELECT
+SELECT DISTINCT
   'default',
-  `NAME`,
-  `OWNER_NAME`,
+  D.`NAME`,
+  D.`OWNER_NAME`,
   cast(null as string),
   cast(null as string),
   cast(null as string),
   `DB_LOCATION_URI`
 FROM
-  sys.DBS
+  `sys`.`DBS` D, `sys`.`TBLS` T, `sys`.`TBL_PRIVS` P
+WHERE
+  NOT restrict_information_schema() OR
+  D.`DB_ID` = T.`DB_ID`
+  AND T.`TBL_ID` = P.`TBL_ID`
+  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR 
P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: sys@dbs
+PREHOOK: Input: sys@tbl_privs
+PREHOOK: Input: sys@tbls
 PREHOOK: Output: INFORMATION_SCHEMA@SCHEMATA
 PREHOOK: Output: database:information_schema
 POSTHOOK: query: CREATE VIEW IF NOT EXISTS `SCHEMATA`
@@ -2405,27 +2413,35 @@ POSTHOOK: query: CREATE VIEW IF NOT EXISTS `SCHEMATA`
   `DEFAULT_CHARACTER_SET_NAME`,
   `SQL_PATH`
 ) AS
-SELECT
+SELECT DISTINCT
   'default',
-  `NAME`,
-  `OWNER_NAME`,
+  D.`NAME`,
+  D.`OWNER_NAME`,
   cast(null as string),
   cast(null as string),
   cast(null as string),
   `DB_LOCATION_URI`
 FROM
-  sys.DBS
+  `sys`.`DBS` D, `sys`.`TBLS` T, `sys`.`TBL_PRIVS` P
+WHERE
+  NOT restrict_information_schema() OR
+  D.`DB_ID` = T.`DB_ID`
+  AND T.`TBL_ID` = P.`TBL_ID`
+  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR 
P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: sys@dbs
+POSTHOOK: Input: sys@tbl_privs
+POSTHOOK: Input: sys@tbls
 POSTHOOK: Output: INFORMATION_SCHEMA@SCHEMATA
 POSTHOOK: Output: database:information_schema
 POSTHOOK: Lineage: SCHEMATA.catalog_name SIMPLE []
 POSTHOOK: Lineage: SCHEMATA.default_character_set_catalog EXPRESSION []
 POSTHOOK: Lineage: SCHEMATA.default_character_set_name EXPRESSION []
 POSTHOOK: Lineage: SCHEMATA.default_character_set_schema EXPRESSION []
-POSTHOOK: Lineage: SCHEMATA.schema_name SIMPLE 
[(dbs)dbs.FieldSchema(name:name, type:string, comment:from deserializer), ]
+POSTHOOK: Lineage: SCHEMATA.schema_name SIMPLE [(dbs)d.FieldSchema(name:name, 
type:string, comment:from deserializer), ]
 #### A masked pattern was here ####
-POSTHOOK: Lineage: SCHEMATA.sql_path SIMPLE 
[(dbs)dbs.FieldSchema(name:db_location_uri, type:string, comment:from 
deserializer), ]
+POSTHOOK: Lineage: SCHEMATA.sql_path SIMPLE 
[(dbs)d.FieldSchema(name:db_location_uri, type:string, comment:from 
deserializer), ]
 PREHOOK: query: CREATE VIEW IF NOT EXISTS `TABLES`
 (
   `TABLE_CATALOG`,
@@ -2441,7 +2457,7 @@ PREHOOK: query: CREATE VIEW IF NOT EXISTS `TABLES`
   `IS_TYPED`,
   `COMMIT_ACTION`
 ) AS
-SELECT
+SELECT DISTINCT
   'default',
   D.NAME,
   T.TBL_NAME,
@@ -2455,11 +2471,16 @@ SELECT
   'NO',
   cast(null as string)
 FROM
-  `sys`.`TBLS` T, `sys`.`DBS` D
+  `sys`.`TBLS` T, `sys`.`DBS` D, `sys`.`TBL_PRIVS` P
 WHERE
   D.`DB_ID` = T.`DB_ID`
+  AND (NOT restrict_information_schema() OR T.`TBL_ID` = P.`TBL_ID`
+  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR 
P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
+  AND P.`TBL_PRIV`='SELECT')
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: sys@dbs
+PREHOOK: Input: sys@tbl_privs
 PREHOOK: Input: sys@tbls
 PREHOOK: Output: INFORMATION_SCHEMA@TABLES
 PREHOOK: Output: database:information_schema
@@ -2478,7 +2499,7 @@ POSTHOOK: query: CREATE VIEW IF NOT EXISTS `TABLES`
   `IS_TYPED`,
   `COMMIT_ACTION`
 ) AS
-SELECT
+SELECT DISTINCT
   'default',
   D.NAME,
   T.TBL_NAME,
@@ -2492,11 +2513,16 @@ SELECT
   'NO',
   cast(null as string)
 FROM
-  `sys`.`TBLS` T, `sys`.`DBS` D
+  `sys`.`TBLS` T, `sys`.`DBS` D, `sys`.`TBL_PRIVS` P
 WHERE
   D.`DB_ID` = T.`DB_ID`
+  AND (NOT restrict_information_schema() OR T.`TBL_ID` = P.`TBL_ID`
+  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR 
P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
+  AND P.`TBL_PRIV`='SELECT')
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: sys@dbs
+POSTHOOK: Input: sys@tbl_privs
 POSTHOOK: Input: sys@tbls
 POSTHOOK: Output: INFORMATION_SCHEMA@TABLES
 POSTHOOK: Output: database:information_schema
@@ -2523,9 +2549,9 @@ PREHOOK: query: CREATE VIEW IF NOT EXISTS 
`TABLE_PRIVILEGES`
   `IS_GRANTABLE`,
   `WITH_HIERARCHY`
 ) AS
-SELECT
-  `GRANTOR`,
-  `PRINCIPAL_NAME`,
+SELECT DISTINCT
+  P.`GRANTOR`,
+  P.`PRINCIPAL_NAME`,
   'default',
   D.`NAME`,
   T.`TBL_NAME`,
@@ -2533,12 +2559,18 @@ SELECT
   IF (P.`GRANT_OPTION` == 0, 'NO', 'YES'),
   'NO'
 FROM
-  sys.`TBL_PRIVS` P,
-  sys.`TBLS` T,
-  sys.`DBS` D
+  `sys`.`TBL_PRIVS` P,
+  `sys`.`TBLS` T,
+  `sys`.`DBS` D,
+  `sys`.`TBL_PRIVS` P2
 WHERE
-  P.TBL_ID = T.TBL_ID
-  AND T.DB_ID = D.DB_ID
+  P.`TBL_ID` = T.`TBL_ID`
+  AND T.`DB_ID` = D.`DB_ID`
+  AND (NOT restrict_information_schema() OR
+  P.`TBL_ID` = P2.`TBL_ID` AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND 
P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE`
+  AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR 
P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP'))
+  AND P2.`TBL_PRIV`='SELECT')
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: sys@dbs
 PREHOOK: Input: sys@tbl_privs
@@ -2556,9 +2588,9 @@ POSTHOOK: query: CREATE VIEW IF NOT EXISTS 
`TABLE_PRIVILEGES`
   `IS_GRANTABLE`,
   `WITH_HIERARCHY`
 ) AS
-SELECT
-  `GRANTOR`,
-  `PRINCIPAL_NAME`,
+SELECT DISTINCT
+  P.`GRANTOR`,
+  P.`PRINCIPAL_NAME`,
   'default',
   D.`NAME`,
   T.`TBL_NAME`,
@@ -2566,12 +2598,18 @@ SELECT
   IF (P.`GRANT_OPTION` == 0, 'NO', 'YES'),
   'NO'
 FROM
-  sys.`TBL_PRIVS` P,
-  sys.`TBLS` T,
-  sys.`DBS` D
+  `sys`.`TBL_PRIVS` P,
+  `sys`.`TBLS` T,
+  `sys`.`DBS` D,
+  `sys`.`TBL_PRIVS` P2
 WHERE
-  P.TBL_ID = T.TBL_ID
-  AND T.DB_ID = D.DB_ID
+  P.`TBL_ID` = T.`TBL_ID`
+  AND T.`DB_ID` = D.`DB_ID`
+  AND (NOT restrict_information_schema() OR
+  P.`TBL_ID` = P2.`TBL_ID` AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND 
P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE`
+  AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR 
P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP'))
+  AND P2.`TBL_PRIV`='SELECT')
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: sys@dbs
 POSTHOOK: Input: sys@tbl_privs
@@ -2636,7 +2674,7 @@ PREHOOK: query: CREATE VIEW IF NOT EXISTS `COLUMNS`
   `DECLARED_NUMERIC_PRECISION`,
   `DECLARED_NUMERIC_SCALE`
 ) AS
-SELECT
+SELECT DISTINCT
   'default',
   D.NAME,
   T.TBL_NAME,
@@ -2725,18 +2763,26 @@ SELECT
        WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
        ELSE null END
 FROM
-  sys.`COLUMNS_V2` C,
-  sys.`SDS` S,
-  sys.`TBLS` T,
-  sys.`DBS` D
+  `sys`.`COLUMNS_V2` C,
+  `sys`.`SDS` S,
+  `sys`.`TBLS` T,
+  `sys`.`DBS` D,
+  `sys`.`TBL_COL_PRIVS` P
 WHERE
   S.`SD_ID` = T.`SD_ID`
   AND T.`DB_ID` = D.`DB_ID`
   AND C.`CD_ID` = S.`CD_ID`
+  AND (NOT restrict_information_schema() OR
+  T.`TBL_ID` = P.`TBL_ID`
+  AND C.`COLUMN_NAME` = P.`COLUMN_NAME`
+  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR 
P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
+  AND P.`TBL_COL_PRIV`='SELECT')
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: sys@columns_v2
 PREHOOK: Input: sys@dbs
 PREHOOK: Input: sys@sds
+PREHOOK: Input: sys@tbl_col_privs
 PREHOOK: Input: sys@tbls
 PREHOOK: Output: INFORMATION_SCHEMA@COLUMNS
 PREHOOK: Output: database:information_schema
@@ -2790,7 +2836,7 @@ POSTHOOK: query: CREATE VIEW IF NOT EXISTS `COLUMNS`
   `DECLARED_NUMERIC_PRECISION`,
   `DECLARED_NUMERIC_SCALE`
 ) AS
-SELECT
+SELECT DISTINCT
   'default',
   D.NAME,
   T.TBL_NAME,
@@ -2879,18 +2925,26 @@ SELECT
        WHEN lower(C.TYPE_NAME) like 'numeric%' THEN 10
        ELSE null END
 FROM
-  sys.`COLUMNS_V2` C,
-  sys.`SDS` S,
-  sys.`TBLS` T,
-  sys.`DBS` D
+  `sys`.`COLUMNS_V2` C,
+  `sys`.`SDS` S,
+  `sys`.`TBLS` T,
+  `sys`.`DBS` D,
+  `sys`.`TBL_COL_PRIVS` P
 WHERE
   S.`SD_ID` = T.`SD_ID`
   AND T.`DB_ID` = D.`DB_ID`
   AND C.`CD_ID` = S.`CD_ID`
+  AND (NOT restrict_information_schema() OR
+  T.`TBL_ID` = P.`TBL_ID`
+  AND C.`COLUMN_NAME` = P.`COLUMN_NAME`
+  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR 
P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
+  AND P.`TBL_COL_PRIV`='SELECT')
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: sys@columns_v2
 POSTHOOK: Input: sys@dbs
 POSTHOOK: Input: sys@sds
+POSTHOOK: Input: sys@tbl_col_privs
 POSTHOOK: Input: sys@tbls
 POSTHOOK: Output: INFORMATION_SCHEMA@COLUMNS
 POSTHOOK: Output: database:information_schema
@@ -2952,32 +3006,35 @@ PREHOOK: query: CREATE VIEW IF NOT EXISTS 
`COLUMN_PRIVILEGES`
   `PRIVILEGE_TYPE`,
   `IS_GRANTABLE`
 ) AS
-SELECT
-  `GRANTOR`,
-  `PRINCIPAL_NAME`,
+SELECT DISTINCT
+  P.`GRANTOR`,
+  P.`PRINCIPAL_NAME`,
   'default',
   D.`NAME`,
   T.`TBL_NAME`,
-  C.`COLUMN_NAME`,
+  P.`COLUMN_NAME`,
   P.`TBL_COL_PRIV`,
   IF (P.`GRANT_OPTION` == 0, 'NO', 'YES')
 FROM
-  sys.`TBL_COL_PRIVS` P,
-  sys.`TBLS` T,
-  sys.`DBS` D,
-  sys.`COLUMNS_V2` C,
-  sys.`SDS` S
+  `sys`.`TBL_COL_PRIVS` P,
+  `sys`.`TBLS` T,
+  `sys`.`DBS` D,
+  `sys`.`SDS` S,
+  `sys`.`TBL_PRIVS` P2
 WHERE
   S.`SD_ID` = T.`SD_ID`
   AND T.`DB_ID` = D.`DB_ID`
   AND P.`TBL_ID` = T.`TBL_ID`
-  AND P.`COLUMN_NAME` = C.`COLUMN_NAME`
-  AND C.`CD_ID` = S.`CD_ID`
+  AND (NOT restrict_information_schema() OR
+  P.`TBL_ID` = P2.`TBL_ID` AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND 
P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE`
+  AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR 
P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP'))
+  AND P2.`TBL_PRIV`='SELECT')
 PREHOOK: type: CREATEVIEW
-PREHOOK: Input: sys@columns_v2
 PREHOOK: Input: sys@dbs
 PREHOOK: Input: sys@sds
 PREHOOK: Input: sys@tbl_col_privs
+PREHOOK: Input: sys@tbl_privs
 PREHOOK: Input: sys@tbls
 PREHOOK: Output: INFORMATION_SCHEMA@COLUMN_PRIVILEGES
 PREHOOK: Output: database:information_schema
@@ -2992,36 +3049,39 @@ POSTHOOK: query: CREATE VIEW IF NOT EXISTS 
`COLUMN_PRIVILEGES`
   `PRIVILEGE_TYPE`,
   `IS_GRANTABLE`
 ) AS
-SELECT
-  `GRANTOR`,
-  `PRINCIPAL_NAME`,
+SELECT DISTINCT
+  P.`GRANTOR`,
+  P.`PRINCIPAL_NAME`,
   'default',
   D.`NAME`,
   T.`TBL_NAME`,
-  C.`COLUMN_NAME`,
+  P.`COLUMN_NAME`,
   P.`TBL_COL_PRIV`,
   IF (P.`GRANT_OPTION` == 0, 'NO', 'YES')
 FROM
-  sys.`TBL_COL_PRIVS` P,
-  sys.`TBLS` T,
-  sys.`DBS` D,
-  sys.`COLUMNS_V2` C,
-  sys.`SDS` S
+  `sys`.`TBL_COL_PRIVS` P,
+  `sys`.`TBLS` T,
+  `sys`.`DBS` D,
+  `sys`.`SDS` S,
+  `sys`.`TBL_PRIVS` P2
 WHERE
   S.`SD_ID` = T.`SD_ID`
   AND T.`DB_ID` = D.`DB_ID`
   AND P.`TBL_ID` = T.`TBL_ID`
-  AND P.`COLUMN_NAME` = C.`COLUMN_NAME`
-  AND C.`CD_ID` = S.`CD_ID`
+  AND (NOT restrict_information_schema() OR
+  P.`TBL_ID` = P2.`TBL_ID` AND P.`PRINCIPAL_NAME` = P2.`PRINCIPAL_NAME` AND 
P.`PRINCIPAL_TYPE` = P2.`PRINCIPAL_TYPE`
+  AND (P2.`PRINCIPAL_NAME`=current_user() AND P2.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P2.`PRINCIPAL_NAME`) OR 
P2.`PRINCIPAL_NAME` = 'public') AND P2.`PRINCIPAL_TYPE`='GROUP'))
+  AND P2.`TBL_PRIV`='SELECT')
 POSTHOOK: type: CREATEVIEW
-POSTHOOK: Input: sys@columns_v2
 POSTHOOK: Input: sys@dbs
 POSTHOOK: Input: sys@sds
 POSTHOOK: Input: sys@tbl_col_privs
+POSTHOOK: Input: sys@tbl_privs
 POSTHOOK: Input: sys@tbls
 POSTHOOK: Output: INFORMATION_SCHEMA@COLUMN_PRIVILEGES
 POSTHOOK: Output: database:information_schema
-POSTHOOK: Lineage: COLUMN_PRIVILEGES.column_name SIMPLE 
[(columns_v2)c.FieldSchema(name:column_name, type:string, comment:from 
deserializer), ]
+POSTHOOK: Lineage: COLUMN_PRIVILEGES.column_name SIMPLE 
[(tbl_col_privs)p.FieldSchema(name:column_name, type:string, comment:from 
deserializer), ]
 POSTHOOK: Lineage: COLUMN_PRIVILEGES.grantee SIMPLE 
[(tbl_col_privs)p.FieldSchema(name:principal_name, type:string, comment:from 
deserializer), ]
 POSTHOOK: Lineage: COLUMN_PRIVILEGES.grantor SIMPLE 
[(tbl_col_privs)p.FieldSchema(name:grantor, type:string, comment:from 
deserializer), ]
 POSTHOOK: Lineage: COLUMN_PRIVILEGES.is_grantable EXPRESSION 
[(tbl_col_privs)p.FieldSchema(name:grant_option, type:int, comment:from 
deserializer), ]
@@ -3042,7 +3102,7 @@ PREHOOK: query: CREATE VIEW IF NOT EXISTS `VIEWS`
   `IS_TRIGGER_DELETABLE`,
   `IS_TRIGGER_INSERTABLE_INTO`
 ) AS
-SELECT
+SELECT DISTINCT
   'default',
   D.NAME,
   T.TBL_NAME,
@@ -3055,12 +3115,19 @@ SELECT
   false
 FROM
   `sys`.`DBS` D,
-  `sys`.`TBLS` T
+  `sys`.`TBLS` T,
+  `sys`.`TBL_PRIVS` P
 WHERE
-   D.`DB_ID` = T.`DB_ID` AND
-   length(T.VIEW_ORIGINAL_TEXT) > 0
+  D.`DB_ID` = T.`DB_ID`
+  AND length(T.VIEW_ORIGINAL_TEXT) > 0
+  AND (NOT restrict_information_schema() OR
+  T.`TBL_ID` = P.`TBL_ID`
+  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR 
P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
+  AND P.`TBL_PRIV`='SELECT')
 PREHOOK: type: CREATEVIEW
 PREHOOK: Input: sys@dbs
+PREHOOK: Input: sys@tbl_privs
 PREHOOK: Input: sys@tbls
 PREHOOK: Output: INFORMATION_SCHEMA@VIEWS
 PREHOOK: Output: database:information_schema
@@ -3077,7 +3144,7 @@ POSTHOOK: query: CREATE VIEW IF NOT EXISTS `VIEWS`
   `IS_TRIGGER_DELETABLE`,
   `IS_TRIGGER_INSERTABLE_INTO`
 ) AS
-SELECT
+SELECT DISTINCT
   'default',
   D.NAME,
   T.TBL_NAME,
@@ -3090,12 +3157,19 @@ SELECT
   false
 FROM
   `sys`.`DBS` D,
-  `sys`.`TBLS` T
+  `sys`.`TBLS` T,
+  `sys`.`TBL_PRIVS` P
 WHERE
-   D.`DB_ID` = T.`DB_ID` AND
-   length(T.VIEW_ORIGINAL_TEXT) > 0
+  D.`DB_ID` = T.`DB_ID`
+  AND length(T.VIEW_ORIGINAL_TEXT) > 0
+  AND (NOT restrict_information_schema() OR
+  T.`TBL_ID` = P.`TBL_ID`
+  AND (P.`PRINCIPAL_NAME`=current_user() AND P.`PRINCIPAL_TYPE`='USER'
+    OR ((array_contains(current_groups(), P.`PRINCIPAL_NAME`) OR 
P.`PRINCIPAL_NAME` = 'public') AND P.`PRINCIPAL_TYPE`='GROUP'))
+  AND P.`TBL_PRIV`='SELECT')
 POSTHOOK: type: CREATEVIEW
 POSTHOOK: Input: sys@dbs
+POSTHOOK: Input: sys@tbl_privs
 POSTHOOK: Input: sys@tbls
 POSTHOOK: Output: INFORMATION_SCHEMA@VIEWS
 POSTHOOK: Output: database:information_schema
@@ -3149,7 +3223,7 @@ POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
-plan_1 DISABLED        NULL    default
+plan_1 DISABLED        0       default
 PREHOOK: query: CREATE RESOURCE PLAN plan_2 WITH QUERY_PARALLELISM=5
 PREHOOK: type: CREATE RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3183,7 +3257,7 @@ POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
-plan_1 DISABLED        NULL    default
+plan_1 DISABLED        0       default
 plan_2 DISABLED        10      default
 FAILED: SemanticException Invalid create arguments (tok_create_rp plan_3 
(tok_query_parallelism 5) (tok_default_pool all))
 PREHOOK: query: ALTER RESOURCE PLAN plan_1 RENAME TO plan_2
@@ -3198,7 +3272,7 @@ POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
-plan_1 DISABLED        NULL    default
+plan_1 DISABLED        0       default
 plan_2 DISABLED        10      default
 PREHOOK: query: ALTER RESOURCE PLAN plan_1 RENAME TO plan_3
 PREHOOK: type: ALTER RESOURCEPLAN
@@ -3214,7 +3288,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 DISABLED        10      default
-plan_3 DISABLED        NULL    default
+plan_3 DISABLED        0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 SET QUERY_PARALLELISM = 4
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3244,7 +3318,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 DISABLED        10      default
-plan_3 DISABLED        NULL    default
+plan_3 DISABLED        0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 SET QUERY_PARALLELISM = 30, DEFAULT 
POOL = default1
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3258,7 +3332,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 DISABLED        10      default
-plan_3 DISABLED        NULL    default
+plan_3 DISABLED        0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 ENABLE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3286,7 +3360,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 DISABLED        10      default
-plan_3 DISABLED        NULL    default
+plan_3 DISABLED        0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 ACTIVATE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3300,7 +3374,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 DISABLED        10      default
-plan_3 DISABLED        NULL    default
+plan_3 DISABLED        0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 DISABLE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3315,7 +3389,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 DISABLED        10      default
-plan_3 DISABLED        NULL    default
+plan_3 DISABLED        0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 ENABLE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3330,7 +3404,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 DISABLED        10      default
-plan_3 ENABLED NULL    default
+plan_3 ENABLED 0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 ACTIVATE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3345,7 +3419,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 DISABLED        10      default
-plan_3 ACTIVE  NULL    default
+plan_3 ACTIVE  0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 ACTIVATE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3360,7 +3434,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 DISABLED        10      default
-plan_3 ACTIVE  NULL    default
+plan_3 ACTIVE  0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 ENABLE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3374,7 +3448,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 DISABLED        10      default
-plan_3 ACTIVE  NULL    default
+plan_3 ACTIVE  0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 DISABLE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3388,7 +3462,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 DISABLED        10      default
-plan_3 ACTIVE  NULL    default
+plan_3 ACTIVE  0       default
 PREHOOK: query: DISABLE WORKLOAD MANAGEMENT
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3403,7 +3477,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 DISABLED        10      default
-plan_3 ENABLED NULL    default
+plan_3 ENABLED 0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 DISABLE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3423,7 +3497,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 DISABLED        10      default
-plan_3 ACTIVE  NULL    default
+plan_3 ACTIVE  0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_2 ENABLE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3438,7 +3512,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 ENABLED 10      default
-plan_3 ACTIVE  NULL    default
+plan_3 ACTIVE  0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_2 ACTIVATE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3453,7 +3527,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 ACTIVE  10      default
-plan_3 ENABLED NULL    default
+plan_3 ENABLED 0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 ENABLE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3468,7 +3542,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 ACTIVE  10      default
-plan_3 ENABLED NULL    default
+plan_3 ENABLED 0       default
 PREHOOK: query: ALTER RESOURCE PLAN plan_3 DISABLE
 PREHOOK: type: ALTER RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3483,7 +3557,7 @@ POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
 plan_2 ACTIVE  10      default
-plan_3 DISABLED        NULL    default
+plan_3 DISABLED        0       default
 PREHOOK: query: DROP RESOURCE PLAN plan_2
 PREHOOK: type: DROP RESOURCEPLAN
 PREHOOK: Output: dummyHostnameForTest
@@ -3669,7 +3743,7 @@ POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
-plan_1 ENABLED NULL    default
+plan_1 ENABLED 0       default
 plan_2 ACTIVE  10      default
 table  DISABLED        1       default
 PREHOOK: query: DROP TRIGGER plan_1.trigger_2
@@ -3693,7 +3767,7 @@ POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
-plan_1 ACTIVE  NULL    default
+plan_1 ACTIVE  0       default
 plan_2 ENABLED 10      default
 table  DISABLED        1       default
 PREHOOK: query: DROP TRIGGER plan_1.trigger_2
@@ -3999,7 +4073,7 @@ POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
-plan_1 ACTIVE  NULL    default
+plan_1 ACTIVE  0       default
 plan_2 DISABLED        10      def
 table  DISABLED        1       default
 PREHOOK: query: ALTER RESOURCE PLAN `table` SET DEFAULT POOL = `table`.pool
@@ -4041,7 +4115,7 @@ POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
-plan_1 ACTIVE  NULL    default
+plan_1 ACTIVE  0       default
 plan_2 DISABLED        10      def
 table  DISABLED        1       NULL
 PREHOOK: query: ALTER POOL plan_2.def.c1 ADD TRIGGER trigger_1
@@ -4337,9 +4411,9 @@ POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
-plan_1 ENABLED NULL    default
-plan_2 DISABLED        NULL    default
-plan_4 ACTIVE  NULL    default
+plan_1 ENABLED 0       default
+plan_2 DISABLED        0       default
+plan_4 ACTIVE  0       default
 table  DISABLED        1       NULL
 PREHOOK: query: SELECT * FROM SYS.WM_POOLS
 PREHOOK: type: QUERY
@@ -4432,11 +4506,11 @@ POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
-plan_1 ENABLED NULL    default
-plan_2 DISABLED        NULL    default
-plan_4 ACTIVE  NULL    default
-plan_4a        DISABLED        NULL    default
-plan_4b        DISABLED        NULL    default
+plan_1 ENABLED 0       default
+plan_2 DISABLED        0       default
+plan_4 ACTIVE  0       default
+plan_4a        DISABLED        0       default
+plan_4b        DISABLED        0       default
 table  DISABLED        1       NULL
 PREHOOK: query: SELECT * FROM SYS.WM_POOLS
 PREHOOK: type: QUERY
@@ -4507,11 +4581,11 @@ POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
-plan_1 ENABLED NULL    default
-plan_2 DISABLED        NULL    default
-plan_4 ACTIVE  NULL    default
-plan_4a        DISABLED        NULL    default
-plan_4a_old_0  DISABLED        NULL    default
+plan_1 ENABLED 0       default
+plan_2 DISABLED        0       default
+plan_4 ACTIVE  0       default
+plan_4a        DISABLED        0       default
+plan_4a_old_0  DISABLED        0       default
 table  DISABLED        1       NULL
 PREHOOK: query: SELECT * FROM SYS.WM_POOLS
 PREHOOK: type: QUERY
@@ -4557,11 +4631,11 @@ POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
-plan_1 ENABLED NULL    default
-plan_2 DISABLED        NULL    default
-plan_4 ACTIVE  NULL    default
-plan_4_old_0   DISABLED        NULL    default
-plan_4a_old_0  DISABLED        NULL    default
+plan_1 ENABLED 0       default
+plan_2 DISABLED        0       default
+plan_4 ACTIVE  0       default
+plan_4_old_0   DISABLED        0       default
+plan_4a_old_0  DISABLED        0       default
 table  DISABLED        1       NULL
 PREHOOK: query: CREATE RESOURCE PLAN plan_4a LIKE plan_4
 PREHOOK: type: CREATE RESOURCEPLAN
@@ -4586,12 +4660,12 @@ POSTHOOK: query: SELECT * FROM SYS.WM_RESOURCEPLANS
 POSTHOOK: type: QUERY
 POSTHOOK: Input: sys@wm_resourceplans
 #### A masked pattern was here ####
-plan_1 ENABLED NULL    default
-plan_2 DISABLED        NULL    default
-plan_4 ACTIVE  NULL    default
-plan_4_old_0   DISABLED        NULL    default
-plan_4_old_1   DISABLED        NULL    default
-plan_4a_old_0  DISABLED        NULL    default
+plan_1 ENABLED 0       default
+plan_2 DISABLED        0       default
+plan_4 ACTIVE  0       default
+plan_4_old_0   DISABLED        0       default
+plan_4_old_1   DISABLED        0       default
+plan_4a_old_0  DISABLED        0       default
 table  DISABLED        1       NULL
 PREHOOK: query: SELECT * FROM SYS.WM_POOLS
 PREHOOK: type: QUERY

http://git-wip-us.apache.org/repos/asf/hive/blob/61ec445c/ql/src/test/results/clientpositive/show_functions.q.out
----------------------------------------------------------------------
diff --git a/ql/src/test/results/clientpositive/show_functions.q.out 
b/ql/src/test/results/clientpositive/show_functions.q.out
index 02b956b..923a967 100644
--- a/ql/src/test/results/clientpositive/show_functions.q.out
+++ b/ql/src/test/results/clientpositive/show_functions.q.out
@@ -65,6 +65,7 @@ create_union
 cume_dist
 current_database
 current_date
+current_groups
 current_timestamp
 current_user
 date_add
@@ -212,6 +213,7 @@ regr_syy
 repeat
 replace
 replicate_rows
+restrict_information_schema
 reverse
 rlike
 round
@@ -319,6 +321,7 @@ create_union
 cume_dist
 current_database
 current_date
+current_groups
 current_timestamp
 current_user
 PREHOOK: query: SHOW FUNCTIONS '.*e$'

Reply via email to