This is an automated email from the ASF dual-hosted git repository.

daijy pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git


The following commit(s) were added to refs/heads/master by this push:
     new 46f45e3d HIVE-21672: HiveServer2 needs to support sidecar's 'ldap.xml' file (Sam An, reviewed by Daniel Dai)
46f45e3d is described below

commit 46f45e3d417eaede320b1088a37721db5901267d
Author: Sam An <sam...@cloudera.com>
AuthorDate: Fri May 3 14:26:17 2019 -0700

    HIVE-21672: HiveServer2 needs to support sidecar's 'ldap.xml' file (Sam An, reviewed by Daniel Dai)
    
    Signed-off-by: Daniel Dai <dai...@gmail.com>
---
 .../java/org/apache/hadoop/hive/conf/HiveConf.java | 18 ++++++-
 .../org/apache/hadoop/hive/conf/TestHiveConf.java  | 57 +++++++++++++++++++++-
 2 files changed, 73 insertions(+), 2 deletions(-)

diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 0c2bd1e..049e837 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -4703,7 +4703,11 @@ public class HiveConf extends Configuration {
             "This parameter enables a number of optimizations when running on 
blobstores:\n" +
             "(1) If hive.blobstore.use.blobstore.as.scratchdir is false, force 
the last Hive job to write to the blobstore.\n" +
             "This is a performance optimization that forces the final 
FileSinkOperator to write to the blobstore.\n" +
-            "See HIVE-15121 for details.");
+            "See HIVE-15121 for details."),
+
+    HIVE_ADDITIONAL_CONFIG_FILES("hive.additional.config.files", "",
+            "The names of additional config files, such as ldap-site.xml," +
+                    "spark-site.xml, etc in comma separated list.");
 
     public final String varname;
     public final String altName;
@@ -5474,6 +5478,18 @@ public class HiveConf extends Configuration {
       addResource(hiveServer2SiteUrl);
     }
 
+    String val = this.getVar(HiveConf.ConfVars.HIVE_ADDITIONAL_CONFIG_FILES);
+    ClassLoader classLoader = Thread.currentThread().getContextClassLoader();
+
+    if (val != null && !val.isEmpty()) {
+      String[] configFiles = val.split(",");
+      for (String config : configFiles) {
+        URL configURL = findConfigFile(classLoader, config, true);
+        if (configURL != null) {
+          addResource(configURL);
+        }
+      }
+    }
     // Overlay the values of any system properties and manual overrides
     applySystemProperties();
 
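For context, a minimal sketch of how the new property is meant to be used (not part of the commit; the file names and the example LDAP property are illustrative):

    // Assumption: hive-site.xml on the classpath contains
    //   <property>
    //     <name>hive.additional.config.files</name>
    //     <value>ldap-site.xml</value>
    //   </property>
    // and ldap-site.xml sits alongside it, defining
    // hive.server2.authentication.ldap.Domain.
    HiveConf conf = new HiveConf();
    // Each listed file is resolved via findConfigFile() and overlaid as a resource,
    // so its properties are readable like any other HiveConf setting:
    String domain = conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_PLAIN_LDAP_DOMAIN);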
diff --git a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
index 780a708..bf9dee7 100644
--- a/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
+++ b/common/src/test/org/apache/hadoop/hive/conf/TestHiveConf.java
@@ -18,15 +18,17 @@
 package org.apache.hadoop.hive.conf;
 
 import com.google.common.collect.Lists;
+import org.apache.commons.io.FileUtils;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
-import org.apache.hadoop.util.Shell;
 import org.apache.hive.common.util.HiveTestUtils;
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.io.File;
 import java.io.UnsupportedEncodingException;
+import java.net.URL;
 import java.net.URLEncoder;
 import java.util.ArrayList;
 import java.util.concurrent.TimeUnit;
@@ -189,4 +191,57 @@ public class TestHiveConf {
     Assert.assertEquals(URLEncoder.encode(query, "UTF-8"), conf.get(ConfVars.HIVEQUERYSTRING.varname));
     Assert.assertEquals(query, conf.getQueryString());
   }
+
+  @Test
+  public void testAdditionalConfigFiles() throws Exception{
+    URL url = ClassLoader.getSystemResource("hive-site.xml");
+    File fileHiveSite = new File(url.getPath());
+
+    String parFolder = fileHiveSite.getParent();
+    //back up hive-site.xml
+    String bakHiveSiteFileName = parFolder + "/hive-site-bak.xml";
+    File fileBakHiveSite = new File(bakHiveSiteFileName);
+    FileUtils.copyFile(fileHiveSite, fileBakHiveSite);
+
+    String content = FileUtils.readFileToString(fileHiveSite);
+    content = content.substring(0, content.lastIndexOf("</configuration>"));
+
+    String testHiveSiteString = content + "<property>\n" +
+            " <name>HIVE_SERVER2_PLAIN_LDAP_DOMAIN</name>\n" +
+            " <value>a.com</value>\n" +
+            "</property>\n" +
+            "\n" +
+            " <property>\n" +
+            "   <name>hive.additional.config.files</name>\n" +
+            "   <value>ldap-site.xml,other.xml</value>\n" +
+            "   <description>additional config dir for Hive to 
load</description>\n" +
+            " </property>\n" +
+            "\n" +
+            "</configuration>";
+
+    FileUtils.writeStringToFile(fileHiveSite, testHiveSiteString);
+
+    String testLdapString = "<?xml version=\"1.0\"?>\n" +
+            "<?xml-stylesheet type=\"text/xsl\" 
href=\"configuration.xsl\"?>\n" +
+            "<configuration>\n" +
+            "  <property>\n" +
+            "  <name>hive.server2.authentication.ldap.Domain</name>\n" +
+            "  <value>b.com</value>\n" +
+            "</property>\n" +
+            "\n" +
+            "</configuration>";
+
+
+    String newFileName = parFolder+"/ldap-site.xml";
+    File f2 = new File(newFileName);
+    FileUtils.writeStringToFile(f2, testLdapString);
+
+    HiveConf conf = new HiveConf();
+    String val = conf.getVar(ConfVars.HIVE_SERVER2_PLAIN_LDAP_DOMAIN);
+    Assert.assertEquals("b.com", val);
+    //restore and clean up
+    FileUtils.copyFile(fileBakHiveSite, fileHiveSite);
+    f2.delete();
+    fileBakHiveSite.delete();
+  }
 }
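Note: the additional files are located through the same findConfigFile() lookup shown in the HiveConf.java hunk above, so they have to be discoverable by that lookup at startup; the test accordingly writes ldap-site.xml next to hive-site.xml on the test classpath, then restores the original hive-site.xml and deletes the generated files so later tests are unaffected.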
